From 2589eec221a13953b7325a0e1d3b676d0e588cb9 Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Tue, 5 Nov 2024 12:25:00 +0100 Subject: [PATCH 1/8] Implement perplexity agent tool This PR enables AI Agents to use perplexity AI in order to retrieve data from the web. --- tools/perplexity/README.md | 62 +++++++++++++++++++++++++ tools/perplexity/perplexity.go | 70 +++++++++++++++++++++++++++++ tools/perplexity/perplexity_test.go | 55 +++++++++++++++++++++++ 3 files changed, 187 insertions(+) create mode 100644 tools/perplexity/README.md create mode 100644 tools/perplexity/perplexity.go create mode 100644 tools/perplexity/perplexity_test.go diff --git a/tools/perplexity/README.md b/tools/perplexity/README.md new file mode 100644 index 000000000..59628f175 --- /dev/null +++ b/tools/perplexity/README.md @@ -0,0 +1,62 @@ + +# Perplexity Tool Integration for Agents + +Use perplexity in your AI Agent to enrich it with data from the web. + +Full code example: + +```go +package main + +import ( + "context" + "fmt" + "os" + + "github.com/tmc/langchaingo/agents" + "github.com/tmc/langchaingo/callbacks" + "github.com/tmc/langchaingo/chains" + "github.com/tmc/langchaingo/llms/openai" + "github.com/tmc/langchaingo/tools" + "github.com/tmc/langchaingo/tools/perplexity" +) + +func main() { + if err := run(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} + +func run() error { + llm, err := openai.New( + openai.WithModel("gpt-4o-mini"), + openai.WithCallback(callbacks.LogHandler{}), + ) + if err != nil { + return err + } + + perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall) + if err != nil { + return err + } + + agentTools := []tools.Tool{ + perpl, + } + + agent := agents.NewOneShotAgent(llm, + agentTools, + agents.WithMaxIterations(2), + ) + executor := agents.NewExecutor(agent) + + question := "what's the latest and best LLM on the market at the moment?" + answer, err := chains.Run(context.Background(), executor, question) + + fmt.Println(answer) + + return err +} +``` \ No newline at end of file diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go new file mode 100644 index 000000000..826c25d69 --- /dev/null +++ b/tools/perplexity/perplexity.go @@ -0,0 +1,70 @@ +package perplexity + +import ( + "context" + "fmt" + "os" + + "github.com/tmc/langchaingo/llms" + "github.com/tmc/langchaingo/llms/openai" +) + +type Model string + +// Model pricing overview: https://docs.perplexity.ai/guides/pricing +const ( + ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online" + ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online" + ModelLlamaSonarHuge Model = "llama-3.1-sonar-huge-128k-online" +) + +type Perplexity struct { + llm *openai.LLM +} + +func NewPerplexity(model Model) (*Perplexity, error) { + perplexity := &Perplexity{} + var err error + + apiKey := os.Getenv("PERPLEXITY_API_KEY") + if apiKey == "" { + return nil, fmt.Errorf("PERPLEXITY_API_KEY not set") + } + + perplexity.llm, err = openai.New( + openai.WithModel(string(model)), + openai.WithBaseURL("https://api.perplexity.ai"), + openai.WithToken(apiKey), + ) + if err != nil { + return nil, err + } + + return perplexity, nil +} + +func (p *Perplexity) Name() string { + return "PerplexityAI" +} + +func (p *Perplexity) Description() string { + return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet." 
+} + +func (p *Perplexity) Call(ctx context.Context, input string) (string, error) { + content := []llms.MessageContent{ + llms.TextParts(llms.ChatMessageTypeHuman, input), + } + + var generatedText string + _, err := p.llm.GenerateContent(ctx, content, + llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error { + generatedText += string(chunk) + return nil + })) + if err != nil { + return "", err + } + + return generatedText, nil +} diff --git a/tools/perplexity/perplexity_test.go b/tools/perplexity/perplexity_test.go new file mode 100644 index 000000000..90fa81220 --- /dev/null +++ b/tools/perplexity/perplexity_test.go @@ -0,0 +1,55 @@ +package perplexity + +import ( + "context" + "os" + "strings" + "testing" + + "github.com/tmc/langchaingo/agents" + "github.com/tmc/langchaingo/chains" + "github.com/tmc/langchaingo/llms/openai" + "github.com/tmc/langchaingo/tools" +) + +func TestRun(t *testing.T) { + t.Parallel() + + if os.Getenv("PERPLEXITY_API_KEY") == "" { + t.Skip("PERPLEXITY_API_KEY not set") + } + if os.Getenv("OPENAI_API_KEY") == "" { + t.Skip("OPENAI_API_KEY not set") + } + + llm, err := openai.New() + if err != nil { + t.Fatalf("failed to create LLM: %v", err) + } + + perpl, err := NewPerplexity(ModelLlamaSonarSmall) + if err != nil { + t.Fatalf("failed to create Perplexity tool: %v", err) + } + + agentTools := []tools.Tool{ + perpl, + } + + agent := agents.NewOneShotAgent(llm, + agentTools, + agents.WithMaxIterations(1), + ) + executor := agents.NewExecutor(agent) + + question := "what is the largest country in the world by total area?" + answer, err := chains.Run(context.Background(), executor, question) + if err != nil { + t.Fatalf("failed to run chains: %v", err) + } + + const expectedAnswer = "Russia" + if !strings.Contains(answer, expectedAnswer) { + t.Errorf("expected answer to contain %q, got %q", expectedAnswer, answer) + } +} From ff93722af1cad13cc496b3c2caa1993f158e3a1c Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Sun, 5 Jan 2025 22:32:31 +0100 Subject: [PATCH 2/8] api key as option --- tools/perplexity/perplexity.go | 46 ++++++++++++++++++++++++++-------- 1 file changed, 36 insertions(+), 10 deletions(-) diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go index 826c25d69..8360b9020 100644 --- a/tools/perplexity/perplexity.go +++ b/tools/perplexity/perplexity.go @@ -18,29 +18,55 @@ const ( ModelLlamaSonarHuge Model = "llama-3.1-sonar-huge-128k-online" ) +type Option func(*options) + +type options struct { + apiKey string + model Model +} + +func WithAPIKey(apiKey string) Option { + return func(o *options) { + o.apiKey = apiKey + } +} + +func WithModel(model Model) Option { + return func(o *options) { + o.model = model + } +} + type Perplexity struct { llm *openai.LLM } -func NewPerplexity(model Model) (*Perplexity, error) { - perplexity := &Perplexity{} - var err error +func NewPerplexity(opts ...Option) (*Perplexity, error) { + options := &options{ + apiKey: os.Getenv("PERPLEXITY_API_KEY"), + model: ModelLlamaSonarSmall, // Default model + } + + for _, opt := range opts { + opt(options) + } - apiKey := os.Getenv("PERPLEXITY_API_KEY") - if apiKey == "" { - return nil, fmt.Errorf("PERPLEXITY_API_KEY not set") + if options.apiKey == "" { + return nil, fmt.Errorf("Perplexity API key not set") } - perplexity.llm, err = openai.New( - openai.WithModel(string(model)), + llm, err := openai.New( + openai.WithModel(string(options.model)), openai.WithBaseURL("https://api.perplexity.ai"), - openai.WithToken(apiKey), + 
openai.WithToken(options.apiKey), ) if err != nil { return nil, err } - return perplexity, nil + return &Perplexity{ + llm: llm, + }, nil } func (p *Perplexity) Name() string { From dca2a9c9730ddb1964c673343ea80a35c024cda2 Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Sun, 5 Jan 2025 22:34:07 +0100 Subject: [PATCH 3/8] add doc.go --- tools/perplexity/README.md | 62 -------------------------------------- tools/perplexity/doc.go | 40 ++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 62 deletions(-) delete mode 100644 tools/perplexity/README.md create mode 100644 tools/perplexity/doc.go diff --git a/tools/perplexity/README.md b/tools/perplexity/README.md deleted file mode 100644 index 59628f175..000000000 --- a/tools/perplexity/README.md +++ /dev/null @@ -1,62 +0,0 @@ - -# Perplexity Tool Integration for Agents - -Use perplexity in your AI Agent to enrich it with data from the web. - -Full code example: - -```go -package main - -import ( - "context" - "fmt" - "os" - - "github.com/tmc/langchaingo/agents" - "github.com/tmc/langchaingo/callbacks" - "github.com/tmc/langchaingo/chains" - "github.com/tmc/langchaingo/llms/openai" - "github.com/tmc/langchaingo/tools" - "github.com/tmc/langchaingo/tools/perplexity" -) - -func main() { - if err := run(); err != nil { - fmt.Fprintln(os.Stderr, err) - os.Exit(1) - } -} - -func run() error { - llm, err := openai.New( - openai.WithModel("gpt-4o-mini"), - openai.WithCallback(callbacks.LogHandler{}), - ) - if err != nil { - return err - } - - perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall) - if err != nil { - return err - } - - agentTools := []tools.Tool{ - perpl, - } - - agent := agents.NewOneShotAgent(llm, - agentTools, - agents.WithMaxIterations(2), - ) - executor := agents.NewExecutor(agent) - - question := "what's the latest and best LLM on the market at the moment?" - answer, err := chains.Run(context.Background(), executor, question) - - fmt.Println(answer) - - return err -} -``` \ No newline at end of file diff --git a/tools/perplexity/doc.go b/tools/perplexity/doc.go new file mode 100644 index 000000000..368c465d9 --- /dev/null +++ b/tools/perplexity/doc.go @@ -0,0 +1,40 @@ +// Package perplexity provides integration with Perplexity AI's API for AI agents. +// +// Perplexity AI functions as an AI-powered search engine that indexes, analyzes, +// and summarizes content from across the internet. This package allows you to +// integrate Perplexity's capabilities into your AI agents to enrich them with +// up-to-date web data. 
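+//
+// Internally the tool reuses the langchaingo openai client and talks to
+// Perplexity's OpenAI-compatible endpoint at https://api.perplexity.ai.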
+// +// Example usage: +// +// llm, err := openai.New( +// openai.WithModel("gpt-4-mini"), +// openai.WithCallback(callbacks.LogHandler{}), +// ) +// if err != nil { +// return err +// } +// +// // Create a new Perplexity instance +// perpl, err := perplexity.NewPerplexity( +// perplexity.WithModel(perplexity.ModelLlamaSonarSmall), +// perplexity.WithAPIKey("your-api-key"), // Optional: defaults to PERPLEXITY_API_KEY env var +// ) +// if err != nil { +// return err +// } +// +// // Add Perplexity as a tool for your agent +// agentTools := []tools.Tool{ +// perpl, +// } +// +// // Create and use the agent +// agent := agents.NewOneShotAgent(llm, +// agentTools, +// agents.WithMaxIterations(2), +// ) +// executor := agents.NewExecutor(agent) +// +// answer, err := chains.Run(context.Background(), executor, "your question here") +package perplexity From 4b3988740bfbeddee79009e2dcb839c847902681 Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Sun, 5 Jan 2025 22:46:37 +0100 Subject: [PATCH 4/8] review changes --- tools/perplexity/perplexity.go | 19 ++--- tools/perplexity/perplexity_test.go | 106 ++++++++++++++++++---------- 2 files changed, 80 insertions(+), 45 deletions(-) diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go index 8360b9020..0dda1f865 100644 --- a/tools/perplexity/perplexity.go +++ b/tools/perplexity/perplexity.go @@ -7,6 +7,7 @@ import ( "github.com/tmc/langchaingo/llms" "github.com/tmc/langchaingo/llms/openai" + "github.com/tmc/langchaingo/tools" ) type Model string @@ -37,11 +38,13 @@ func WithModel(model Model) Option { } } -type Perplexity struct { +type Tool struct { llm *openai.LLM } -func NewPerplexity(opts ...Option) (*Perplexity, error) { +var _ tools.Tool = (*Tool)(nil) + +func New(opts ...Option) (*Tool, error) { options := &options{ apiKey: os.Getenv("PERPLEXITY_API_KEY"), model: ModelLlamaSonarSmall, // Default model @@ -52,7 +55,7 @@ func NewPerplexity(opts ...Option) (*Perplexity, error) { } if options.apiKey == "" { - return nil, fmt.Errorf("Perplexity API key not set") + return nil, fmt.Errorf("PERPLEXITY_API_KEY key not set") } llm, err := openai.New( @@ -64,26 +67,26 @@ func NewPerplexity(opts ...Option) (*Perplexity, error) { return nil, err } - return &Perplexity{ + return &Tool{ llm: llm, }, nil } -func (p *Perplexity) Name() string { +func (t *Tool) Name() string { return "PerplexityAI" } -func (p *Perplexity) Description() string { +func (t *Tool) Description() string { return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet." 
} -func (p *Perplexity) Call(ctx context.Context, input string) (string, error) { +func (t *Tool) Call(ctx context.Context, input string) (string, error) { content := []llms.MessageContent{ llms.TextParts(llms.ChatMessageTypeHuman, input), } var generatedText string - _, err := p.llm.GenerateContent(ctx, content, + _, err := t.llm.GenerateContent(ctx, content, llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error { generatedText += string(chunk) return nil diff --git a/tools/perplexity/perplexity_test.go b/tools/perplexity/perplexity_test.go index 90fa81220..8777b1dab 100644 --- a/tools/perplexity/perplexity_test.go +++ b/tools/perplexity/perplexity_test.go @@ -3,53 +3,85 @@ package perplexity import ( "context" "os" - "strings" "testing" - "github.com/tmc/langchaingo/agents" - "github.com/tmc/langchaingo/chains" - "github.com/tmc/langchaingo/llms/openai" - "github.com/tmc/langchaingo/tools" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestRun(t *testing.T) { - t.Parallel() - - if os.Getenv("PERPLEXITY_API_KEY") == "" { - t.Skip("PERPLEXITY_API_KEY not set") - } - if os.Getenv("OPENAI_API_KEY") == "" { - t.Skip("OPENAI_API_KEY not set") - } - - llm, err := openai.New() - if err != nil { - t.Fatalf("failed to create LLM: %v", err) +func TestNew(t *testing.T) { + tests := []struct { + name string + setup func(*testing.T) []Option + wantErr bool + }{ + { + name: "no api key", + setup: func(t *testing.T) []Option { + t.Setenv("PERPLEXITY_API_KEY", "") + return nil + }, + wantErr: true, + }, + { + name: "with env api key", + setup: func(t *testing.T) []Option { + t.Setenv("PERPLEXITY_API_KEY", "test-key") + return nil + }, + wantErr: false, + }, + { + name: "with option api key", + setup: func(t *testing.T) []Option { + t.Setenv("PERPLEXITY_API_KEY", "") + return []Option{WithAPIKey("test-key")} + }, + wantErr: false, + }, + { + name: "with custom model", + setup: func(t *testing.T) []Option { + t.Setenv("PERPLEXITY_API_KEY", "test-key") + return []Option{WithModel(ModelLlamaSonarLarge)} + }, + wantErr: false, + }, } - perpl, err := NewPerplexity(ModelLlamaSonarSmall) - if err != nil { - t.Fatalf("failed to create Perplexity tool: %v", err) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + opts := tt.setup(t) + + tool, err := New(opts...) + if tt.wantErr { + assert.Error(t, err) + assert.Nil(t, tool) + } else { + assert.NoError(t, err) + assert.NotNil(t, tool) + } + }) } +} - agentTools := []tools.Tool{ - perpl, +func TestTool_Integration(t *testing.T) { + if os.Getenv("PERPLEXITY_API_KEY") == "" { + t.Skip("PERPLEXITY_API_KEY not set") } - agent := agents.NewOneShotAgent(llm, - agentTools, - agents.WithMaxIterations(1), - ) - executor := agents.NewExecutor(agent) + tool, err := New() + require.NoError(t, err) + require.NotNil(t, tool) - question := "what is the largest country in the world by total area?" 
- answer, err := chains.Run(context.Background(), executor, question) - if err != nil { - t.Fatalf("failed to run chains: %v", err) - } + // Test Name and Description + assert.Equal(t, "PerplexityAI", tool.Name()) + assert.NotEmpty(t, tool.Description()) - const expectedAnswer = "Russia" - if !strings.Contains(answer, expectedAnswer) { - t.Errorf("expected answer to contain %q, got %q", expectedAnswer, answer) - } + // Test Call functionality + ctx := context.Background() + response, err := tool.Call(ctx, "what is the largest country in the world by total area?") + require.NoError(t, err) + assert.Contains(t, response, "Russia") } From 957b5a027ac1cbee9738f2fdde2b67a0233ee77f Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Sun, 5 Jan 2025 22:48:43 +0100 Subject: [PATCH 5/8] gofmt --- tools/perplexity/doc.go | 6 +-- tools/perplexity/perplexity_test.go | 63 ++--------------------------- 2 files changed, 7 insertions(+), 62 deletions(-) diff --git a/tools/perplexity/doc.go b/tools/perplexity/doc.go index 368c465d9..e158f85cb 100644 --- a/tools/perplexity/doc.go +++ b/tools/perplexity/doc.go @@ -16,7 +16,7 @@ // } // // // Create a new Perplexity instance -// perpl, err := perplexity.NewPerplexity( +// perpl, err := perplexity.New( // perplexity.WithModel(perplexity.ModelLlamaSonarSmall), // perplexity.WithAPIKey("your-api-key"), // Optional: defaults to PERPLEXITY_API_KEY env var // ) @@ -30,11 +30,11 @@ // } // // // Create and use the agent -// agent := agents.NewOneShotAgent(llm, +// toolAgent := agents.NewOneShotAgent(llm, // agentTools, // agents.WithMaxIterations(2), // ) -// executor := agents.NewExecutor(agent) +// executor := agents.NewExecutor(toolAgent) // // answer, err := chains.Run(context.Background(), executor, "your question here") package perplexity diff --git a/tools/perplexity/perplexity_test.go b/tools/perplexity/perplexity_test.go index 8777b1dab..c6044da52 100644 --- a/tools/perplexity/perplexity_test.go +++ b/tools/perplexity/perplexity_test.go @@ -9,65 +9,11 @@ import ( "github.com/stretchr/testify/require" ) -func TestNew(t *testing.T) { - tests := []struct { - name string - setup func(*testing.T) []Option - wantErr bool - }{ - { - name: "no api key", - setup: func(t *testing.T) []Option { - t.Setenv("PERPLEXITY_API_KEY", "") - return nil - }, - wantErr: true, - }, - { - name: "with env api key", - setup: func(t *testing.T) []Option { - t.Setenv("PERPLEXITY_API_KEY", "test-key") - return nil - }, - wantErr: false, - }, - { - name: "with option api key", - setup: func(t *testing.T) []Option { - t.Setenv("PERPLEXITY_API_KEY", "") - return []Option{WithAPIKey("test-key")} - }, - wantErr: false, - }, - { - name: "with custom model", - setup: func(t *testing.T) []Option { - t.Setenv("PERPLEXITY_API_KEY", "test-key") - return []Option{WithModel(ModelLlamaSonarLarge)} - }, - wantErr: false, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - opts := tt.setup(t) - - tool, err := New(opts...) 
- if tt.wantErr { - assert.Error(t, err) - assert.Nil(t, tool) - } else { - assert.NoError(t, err) - assert.NotNil(t, tool) - } - }) - } -} - func TestTool_Integration(t *testing.T) { - if os.Getenv("PERPLEXITY_API_KEY") == "" { + t.Parallel() + + apiKey := os.Getenv("PERPLEXITY_API_KEY") + if apiKey == "" { t.Skip("PERPLEXITY_API_KEY not set") } @@ -75,7 +21,6 @@ func TestTool_Integration(t *testing.T) { require.NoError(t, err) require.NotNil(t, tool) - // Test Name and Description assert.Equal(t, "PerplexityAI", tool.Name()) assert.NotEmpty(t, tool.Description()) From cc536e60c50b3ccce7d8bd1a8b491d70c6e7fe2e Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Mon, 6 Jan 2025 15:11:28 +0100 Subject: [PATCH 6/8] comment public symbols --- tools/perplexity/perplexity.go | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go index 0dda1f865..6074eb02f 100644 --- a/tools/perplexity/perplexity.go +++ b/tools/perplexity/perplexity.go @@ -10,15 +10,20 @@ import ( "github.com/tmc/langchaingo/tools" ) +// Model represents a Perplexity AI model type type Model string // Model pricing overview: https://docs.perplexity.ai/guides/pricing const ( + // ModelLlamaSonarSmall is the small version of the Llama Sonar model ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online" + // ModelLlamaSonarLarge is the large version of the Llama Sonar model ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online" - ModelLlamaSonarHuge Model = "llama-3.1-sonar-huge-128k-online" + // ModelLlamaSonarHuge is the huge version of the Llama Sonar model + ModelLlamaSonarHuge Model = "llama-3.1-sonar-huge-128k-online" ) +// Option is a function that modifies the options for the Perplexity AI tool type Option func(*options) type options struct { @@ -26,24 +31,28 @@ type options struct { model Model } +// WithAPIKey sets the API key for Perplexity AI func WithAPIKey(apiKey string) Option { return func(o *options) { o.apiKey = apiKey } } +// WithModel sets the model to be used by Perplexity AI func WithModel(model Model) Option { return func(o *options) { o.model = model } } +// Tool implements the Perplexity AI integration type Tool struct { llm *openai.LLM } var _ tools.Tool = (*Tool)(nil) +// New creates a new instance of the Perplexity AI tool with the given options func New(opts ...Option) (*Tool, error) { options := &options{ apiKey: os.Getenv("PERPLEXITY_API_KEY"), @@ -72,14 +81,17 @@ func New(opts ...Option) (*Tool, error) { }, nil } +// Name returns the name of the tool func (t *Tool) Name() string { return "PerplexityAI" } +// Description returns a description of the Perplexity AI tool's capabilities func (t *Tool) Description() string { return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet." 
} +// Call executes a query against the Perplexity AI model and returns the response func (t *Tool) Call(ctx context.Context, input string) (string, error) { content := []llms.MessageContent{ llms.TextParts(llms.ChatMessageTypeHuman, input), From bbd85f1fbf97f1bc6ef2db4f8f8e117f162d7842 Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Mon, 6 Jan 2025 15:14:46 +0100 Subject: [PATCH 7/8] handle callbacks --- tools/perplexity/perplexity.go | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go index 6074eb02f..fb5e5a5db 100644 --- a/tools/perplexity/perplexity.go +++ b/tools/perplexity/perplexity.go @@ -5,6 +5,7 @@ import ( "fmt" "os" + "github.com/tmc/langchaingo/callbacks" "github.com/tmc/langchaingo/llms" "github.com/tmc/langchaingo/llms/openai" "github.com/tmc/langchaingo/tools" @@ -47,7 +48,8 @@ func WithModel(model Model) Option { // Tool implements the Perplexity AI integration type Tool struct { - llm *openai.LLM + llm *openai.LLM + CallbacksHandler callbacks.Handler } var _ tools.Tool = (*Tool)(nil) @@ -93,6 +95,10 @@ func (t *Tool) Description() string { // Call executes a query against the Perplexity AI model and returns the response func (t *Tool) Call(ctx context.Context, input string) (string, error) { + if t.CallbacksHandler != nil { + t.CallbacksHandler.HandleToolStart(ctx, input) + } + content := []llms.MessageContent{ llms.TextParts(llms.ChatMessageTypeHuman, input), } @@ -104,8 +110,15 @@ func (t *Tool) Call(ctx context.Context, input string) (string, error) { return nil })) if err != nil { + if t.CallbacksHandler != nil { + t.CallbacksHandler.HandleToolError(ctx, err) + } return "", err } + if t.CallbacksHandler != nil { + t.CallbacksHandler.HandleToolEnd(ctx, generatedText) + } + return generatedText, nil } From a9d2531e2e9653d2c40615f6666f5463c0fa7c34 Mon Sep 17 00:00:00 2001 From: Simon Klinkert Date: Mon, 6 Jan 2025 15:18:41 +0100 Subject: [PATCH 8/8] Comment should end in a period --- tools/perplexity/perplexity.go | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go index fb5e5a5db..ac2ecab52 100644 --- a/tools/perplexity/perplexity.go +++ b/tools/perplexity/perplexity.go @@ -11,20 +11,20 @@ import ( "github.com/tmc/langchaingo/tools" ) -// Model represents a Perplexity AI model type +// Model represents a Perplexity AI model type. type Model string // Model pricing overview: https://docs.perplexity.ai/guides/pricing const ( - // ModelLlamaSonarSmall is the small version of the Llama Sonar model + // ModelLlamaSonarSmall is the small version of the Llama Sonar model. ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online" - // ModelLlamaSonarLarge is the large version of the Llama Sonar model + // ModelLlamaSonarLarge is the large version of the Llama Sonar model. ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online" - // ModelLlamaSonarHuge is the huge version of the Llama Sonar model + // ModelLlamaSonarHuge is the huge version of the Llama Sonar model. ModelLlamaSonarHuge Model = "llama-3.1-sonar-huge-128k-online" ) -// Option is a function that modifies the options for the Perplexity AI tool +// Option is a function that modifies the options for the Perplexity AI tool. 
type Option func(*options) type options struct { @@ -32,21 +32,21 @@ type options struct { model Model } -// WithAPIKey sets the API key for Perplexity AI +// WithAPIKey sets the API key for Perplexity AI. func WithAPIKey(apiKey string) Option { return func(o *options) { o.apiKey = apiKey } } -// WithModel sets the model to be used by Perplexity AI +// WithModel sets the model to be used by Perplexity AI. func WithModel(model Model) Option { return func(o *options) { o.model = model } } -// Tool implements the Perplexity AI integration +// Tool implements the Perplexity AI integration. type Tool struct { llm *openai.LLM CallbacksHandler callbacks.Handler @@ -54,7 +54,7 @@ type Tool struct { var _ tools.Tool = (*Tool)(nil) -// New creates a new instance of the Perplexity AI tool with the given options +// New creates a new instance of the Perplexity AI tool with the given options. func New(opts ...Option) (*Tool, error) { options := &options{ apiKey: os.Getenv("PERPLEXITY_API_KEY"), @@ -83,17 +83,17 @@ func New(opts ...Option) (*Tool, error) { }, nil } -// Name returns the name of the tool +// Name returns the name of the tool. func (t *Tool) Name() string { return "PerplexityAI" } -// Description returns a description of the Perplexity AI tool's capabilities +// Description returns a description of the Perplexity AI tool's capabilities. func (t *Tool) Description() string { return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet." } -// Call executes a query against the Perplexity AI model and returns the response +// Call executes a query against the Perplexity AI model and returns the response. func (t *Tool) Call(ctx context.Context, input string) (string, error) { if t.CallbacksHandler != nil { t.CallbacksHandler.HandleToolStart(ctx, input)
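
For reference, a minimal usage sketch of the finished tool, including the exported CallbacksHandler field added in the callbacks patch above, could look like the following. It assumes, as in the package example, that callbacks.LogHandler satisfies callbacks.Handler; run it with PERPLEXITY_API_KEY set, or pass perplexity.WithAPIKey explicitly.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	// The API key is read from PERPLEXITY_API_KEY unless WithAPIKey is given.
	search, err := perplexity.New(perplexity.WithModel(perplexity.ModelLlamaSonarSmall))
	if err != nil {
		log.Fatal(err)
	}

	// Optional: receive HandleToolStart/HandleToolEnd/HandleToolError events.
	search.CallbacksHandler = callbacks.LogHandler{}

	answer, err := search.Call(context.Background(),
		"what is the largest country in the world by total area?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(answer)
}
```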