Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

llms: rename convenience function to GenerateFromSinglePrompt #537

Merged
merged 1 commit into from
Jan 20, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .golangci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ linters:
- varnamelen
- nlreturn
- gomnd
- goerr113
- wrapcheck # TODO: we should probably enable this one (at least for new code).
- testpackage
- nolintlint # see https://github.com/golangci/golangci-lint/issues/3228.
Expand Down
2 changes: 1 addition & 1 deletion llms/anthropic/anthropicllm.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ func newClient(opts ...Option) (*anthropicclient.Client, error) {

// Call requests a completion for the given prompt.
func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
2 changes: 1 addition & 1 deletion llms/cohere/coherellm.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ type LLM struct {
var _ llms.Model = (*LLM)(nil)

func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
2 changes: 1 addition & 1 deletion llms/ernie/erniellm.go
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ doc: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2`, ernieclient.ErrNot
}

func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
4 changes: 2 additions & 2 deletions llms/googleai/googleai_llm.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// package googleai implements a langchaingo provider for Google AI LLMs.
// See https://ai.google.dev/ for more details and documentation.
//
//nolint:goerr113, lll
//nolint:lll
package googleai

import (
Expand Down Expand Up @@ -64,7 +64,7 @@ func NewGoogleAI(ctx context.Context, opts ...Option) (*GoogleAI, error) {

// Call implements the [llms.Model] interface.
func (g *GoogleAI) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, g, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, g, prompt, options...)
}

// GenerateContent implements the [llms.Model] interface.
Expand Down
2 changes: 1 addition & 1 deletion llms/huggingface/huggingfacellm.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ var _ llms.Model = (*LLM)(nil)

// Call implements the LLM interface.
func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
10 changes: 6 additions & 4 deletions llms/llms.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,11 @@ type Model interface {
Call(ctx context.Context, prompt string, options ...CallOption) (string, error)
}

// CallLLM is a helper function for implementing Call in terms of
// GenerateContent. It's aimed to be used by Model providers.
func CallLLM(ctx context.Context, llm Model, prompt string, options ...CallOption) (string, error) {
// GenerateFromSinglePrompt is a convenience function for calling an LLM with
// a single string prompt, expecting a single string response. It's useful for
// simple, string-only interactions and provides a slightly more ergonomic API
// than the more general [llms.Model.GenerateContent].
func GenerateFromSinglePrompt(ctx context.Context, llm Model, prompt string, options ...CallOption) (string, error) {
msg := MessageContent{
Role: schema.ChatMessageTypeHuman,
Parts: []ContentPart{TextContent{prompt}},
Expand All @@ -43,7 +45,7 @@ func CallLLM(ctx context.Context, llm Model, prompt string, options ...CallOptio

choices := resp.Choices
if len(choices) < 1 {
return "", errors.New("empty response from model") //nolint:goerr113
return "", errors.New("empty response from model")
}
c1 := choices[0]
return c1.Content, nil
Expand Down
2 changes: 1 addition & 1 deletion llms/local/localllm.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ var _ llms.Model = (*LLM)(nil)

// Call calls the local LLM binary with the given prompt.
func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

func (o *LLM) appendGlobalsToArgs(opts llms.CallOptions) {
Expand Down
2 changes: 1 addition & 1 deletion llms/ollama/ollamallm.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ func New(opts ...Option) (*LLM, error) {

// Call Implement the call interface for LLM.
func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
2 changes: 1 addition & 1 deletion llms/openai/openaillm.go
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ func New(opts ...Option) (*LLM, error) {

// Call requests a completion for the given prompt.
func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
2 changes: 1 addition & 1 deletion llms/vertexai/vertexai_palm_llm.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ var _ llms.Model = (*LLM)(nil)

// Call requests a completion for the given prompt.
func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
return llms.CallLLM(ctx, o, prompt, options...)
return llms.GenerateFromSinglePrompt(ctx, o, prompt, options...)
}

// GenerateContent implements the Model interface.
Expand Down
Loading