
Commit

Merge pull request #524 from tmc/clean6
all: clean up and update comments
tmc authored Jan 17, 2024
2 parents 690d1bd + 75ee210 commit c1aea7b
Showing 2 changed files with 15 additions and 13 deletions.
6 changes: 4 additions & 2 deletions llms/generatecontent.go
@@ -62,13 +62,15 @@ type BinaryContent struct {
 func (BinaryContent) isPart() {}
 
 // ContentResponse is the response returned by a GenerateContent call.
-// It can potentially return multiple response choices.
+// It can potentially return multiple content choices.
 type ContentResponse struct {
 	Choices []*ContentChoice
 }
 
-// ContentChoice is one of the response choices returned by GenerateModel calls.
+// ContentChoice is one of the response choices returned by GenerateContent
+// calls.
 type ContentChoice struct {
+	// Content is the textual content of a response
 	Content string
 
 	// StopReason is the reason the model stopped generating output.
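
As an aside, here is a minimal, hypothetical sketch (not part of this commit) of how a caller might consume the ContentResponse and ContentChoice types whose comments are updated above; the firstChoice helper is invented for illustration, while the Choices and Content fields are taken from the diff.

package example

import (
	"errors"

	"github.com/tmc/langchaingo/llms"
)

// firstChoice returns the textual content of the first content choice,
// mirroring the ContentResponse/ContentChoice shape documented above.
func firstChoice(resp *llms.ContentResponse) (string, error) {
	if resp == nil || len(resp.Choices) == 0 {
		return "", errors.New("response contains no content choices")
	}
	return resp.Choices[0].Content, nil
}
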
22 changes: 11 additions & 11 deletions llms/llms.go
@@ -9,23 +9,23 @@ import (
 
 // LLM is an alias for model, for backwards compatibility.
 //
-// This alias may be removed in the future; please use Model instead.
+// This alias may be removed in the future; please use Model
+// instead.
 type LLM = Model
 
 // Model is an interface multi-modal models implement.
 // Note: this is an experimental API.
 type Model interface {
-	// Call is a simplified interface for Model, generating a single string
-	// response from a single string prompt.
-	//
-	// It is here for backwards compatibility only and may be removed in the
-	// future; please use GenerateContent instead.
-	Call(ctx context.Context, prompt string, options ...CallOption) (string, error)
 
 	// GenerateContent asks the model to generate content from a sequence of
-	// messages. It's the most general interface for LLMs that support chat-like
-	// interactions.
+	// messages. It's the most general interface for multi-modal LLMs that support
+	// chat-like interactions.
 	GenerateContent(ctx context.Context, messages []MessageContent, options ...CallOption) (*ContentResponse, error)
 
+	// Call is a simplified interface for a text-only Model, generating a single
+	// string response from a single string prompt.
+	//
+	// It is here for backwards compatibility only and may be removed
+	// in the future; please use GenerateContent instead.
+	Call(ctx context.Context, prompt string, options ...CallOption) (string, error)
 }
 
 // CallLLM is a helper function for implementing Call in terms of
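
For orientation, a minimal, hypothetical usage sketch (not part of this commit) exercising the Model interface documented above; the openai provider constructor and the ask helper are assumptions about the surrounding repository, while Call's signature is taken from the diff.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai" // assumed provider; any llms.Model works
)

// ask uses the backwards-compatible Call path on any value implementing Model.
func ask(ctx context.Context, model llms.Model, prompt string) (string, error) {
	return model.Call(ctx, prompt)
}

func main() {
	ctx := context.Background()

	llm, err := openai.New() // assumed constructor, not part of this diff
	if err != nil {
		log.Fatal(err)
	}

	out, err := ask(ctx, llm, "Name three Go proverbs.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}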
