diff --git a/examples/openai-gpt4o-mutil-content/README.md b/examples/openai-gpt4o-mutil-content/README.md
new file mode 100644
index 000000000..3f299a96c
--- /dev/null
+++ b/examples/openai-gpt4o-mutil-content/README.md
@@ -0,0 +1,24 @@
+# OpenAI Multi-Content Example
+
+This example demonstrates how to use the OpenAI GPT-4o model with the LangChain Go library to send a single request that combines multiple content parts (text and an image).
+
+## What does this example do?
+
+This nifty little program does the following:
+
+1. Sets up a connection to the OpenAI API using the GPT-4o model.
+
+2. Builds one request containing a system prompt, a human text question, and an image URL (system and human roles).
+
+3. Streams the AI's response to the console as it is generated.
+
+
+## How to Run
+
+1. Make sure you have Go installed on your system.
+2. Set up your OpenAI API key as an environment variable.
+3. Run the program:
+
+```
+go run openai-gpt4o-mutil-content.go
+```
\ No newline at end of file
diff --git a/examples/openai-gpt4o-mutil-content/go.mod b/examples/openai-gpt4o-mutil-content/go.mod
new file mode 100644
index 000000000..04bdae4dd
--- /dev/null
+++ b/examples/openai-gpt4o-mutil-content/go.mod
@@ -0,0 +1,11 @@
+module github.com/tmc/langchaingo/examples/openai-gpt4o-mutil-content
+
+go 1.23
+
+require github.com/tmc/langchaingo v0.1.13-pre.0.0.20250106145851-f1fde1f9e4a0
+
+require (
+	github.com/dlclark/regexp2 v1.10.0 // indirect
+	github.com/google/uuid v1.6.0 // indirect
+	github.com/pkoukk/tiktoken-go v0.1.6 // indirect
+)
diff --git a/examples/openai-gpt4o-mutil-content/go.sum b/examples/openai-gpt4o-mutil-content/go.sum
new file mode 100644
index 000000000..48890ea10
--- /dev/null
+++ b/examples/openai-gpt4o-mutil-content/go.sum
@@ -0,0 +1,22 @@
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
+github.com/dlclark/regexp2 v1.10.0/go.mod 
h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw= +github.com/pkoukk/tiktoken-go v0.1.6/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tmc/langchaingo v0.1.13-pre.0.0.20250106145851-f1fde1f9e4a0 h1:LDu6HkttCQV83dc2A2XlDSg/rYXXHIruZT8jEAkD6xo= +github.com/tmc/langchaingo v0.1.13-pre.0.0.20250106145851-f1fde1f9e4a0/go.mod h1:EeervIv/DNYhSfQSMaql20wMFvhgF7lDaVaatp8lVPw= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/examples/openai-gpt4o-mutil-content/openai-gpt4o-mutil-content.go b/examples/openai-gpt4o-mutil-content/openai-gpt4o-mutil-content.go new file mode 100644 index 000000000..1207a326d --- /dev/null +++ b/examples/openai-gpt4o-mutil-content/openai-gpt4o-mutil-content.go @@ -0,0 +1,35 @@ +package main + +import ( + "context" + "fmt" + 
"github.com/tmc/langchaingo/llms"
+	"github.com/tmc/langchaingo/llms/openai"
+	"log"
+)
+
+func main() {
+	llm, err := openai.New(openai.WithModel("gpt-4o")) // Reads the OpenAI API key from the environment.
+	if err != nil {
+		log.Fatal(err)
+	}
+	ctx := context.Background()
+
+	// One conversation: a system prompt, a human text question, and a second
+	// human message whose only part is an image URL (multi-content request).
+	content := []llms.MessageContent{
+		llms.TextParts(llms.ChatMessageTypeSystem, "You are an OCR assistant."),
+		llms.TextParts(llms.ChatMessageTypeHuman, "which image is this?"),
+		{
+			Role:  llms.ChatMessageTypeHuman,
+			Parts: []llms.ContentPart{llms.ImageURLPart("https://github.com/tmc/langchaingo/blob/main/docs/static/img/parrot-icon.png?raw=true")},
+		},
+	}
+
+	completion, err := llm.GenerateContent(ctx, content, llms.WithStreamingFunc(func(ctx context.Context, chunk []byte) error {
+		fmt.Print(string(chunk)) // Print each streamed chunk as it arrives.
+		return nil
+	}))
+	if err != nil {
+		log.Fatal(err)
+	}
+	_ = completion // Full response was already printed by the streaming callback.
+}