Skip to content

Commit

Permalink
Merge branch 'main' into docs_site_preview_in_ci
Browse files Browse the repository at this point in the history
  • Loading branch information
tmc authored Mar 13, 2024
2 parents 7f18e52 + ca2969c commit 8fc1ddd
Show file tree
Hide file tree
Showing 158 changed files with 2,737 additions and 510 deletions.
2 changes: 2 additions & 0 deletions .github/FUNDING.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# These are supported funding model platforms
github: tmc
3 changes: 3 additions & 0 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -44,4 +44,7 @@ jobs:
- name: Build
run: go build -v ./...
- name: Test
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GENAI_API_KEY: ${{ secrets.GENAI_API_KEY }}
run: go test -v ./...
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ Socktastic

Here are some links to blog posts and articles on using Langchain Go:

- [Using Gemini models in Go with LangChainGo](https://eli.thegreenplace.net/2024/using-gemini-models-in-go-with-langchaingo/) - Jan 2024
- [Using Ollama with LangChainGo](https://eli.thegreenplace.net/2023/using-ollama-with-langchaingo/) - Nov 2023
- [Creating a simple ChatGPT clone with Go](https://sausheong.com/creating-a-simple-chatgpt-clone-with-go-c40b4bec9267?sk=53a2bcf4ce3b0cfae1a4c26897c0deb0) - Aug 2023
- [Creating a ChatGPT Clone that Runs on Your Laptop with Go](https://sausheong.com/creating-a-chatgpt-clone-that-runs-on-your-laptop-with-go-bf9d41f1cf88?sk=05dc67b60fdac6effb1aca84dd2d654e) - Aug 2023
4 changes: 2 additions & 2 deletions agents/mrkl_prompt.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ import (
)

const (
_defaultMrklPrefix = `Today is {{.today}} and you can use tools to get new information.
Answer the following questions as best you can using the following tools:
_defaultMrklPrefix = `Today is {{.today}}.
Answer the following questions as best you can. You have access to the following tools:
{{.tool_descriptions}}`

Expand Down
30 changes: 24 additions & 6 deletions callbacks/agent_final_stream.go
Original file line number Diff line number Diff line change
Expand Up @@ -76,28 +76,32 @@ func (handler *AgentFinalStreamHandler) ReadFromEgress(
func (handler *AgentFinalStreamHandler) HandleStreamingFunc(_ context.Context, chunk []byte) {
chunkStr := string(chunk)
handler.LastTokens += chunkStr
var detectedKeyword string

// Buffer the last few chunks to match the longest keyword size
longestSize := len(handler.Keywords[0])
var longestSize int
for _, k := range handler.Keywords {
if len(k) > longestSize {
longestSize = len(k)
}
}

if len(handler.LastTokens) > longestSize {
handler.LastTokens = handler.LastTokens[len(handler.LastTokens)-longestSize:]
}

// Check for keywords
for _, k := range DefaultKeywords {
if strings.Contains(handler.LastTokens, k) {
handler.KeywordDetected = true
detectedKeyword = k
}
}

if len(handler.LastTokens) > longestSize {
handler.LastTokens = handler.LastTokens[len(handler.LastTokens)-longestSize:]
}

// Check for colon and set print mode.
if handler.KeywordDetected && chunkStr != ":" {
if handler.KeywordDetected && !handler.PrintOutput {
// remove any other strings before the final answer
chunk = []byte(filterFinalString(chunkStr, detectedKeyword))
handler.PrintOutput = true
}

Expand All @@ -106,3 +110,17 @@ func (handler *AgentFinalStreamHandler) HandleStreamingFunc(_ context.Context, c
handler.egress <- chunk
}
}

// filterFinalString returns only the final-answer portion of chunkStr:
// anything up to and including the first occurrence of keyword is dropped,
// a bare leading ":" is stripped when the keyword is absent, and leading
// spaces are trimmed from the result.
func filterFinalString(chunkStr, keyword string) string {
	trimmed := strings.TrimLeft(chunkStr, " ")

	if idx := strings.Index(trimmed, keyword); idx != -1 {
		// Keep only what follows the keyword.
		trimmed = trimmed[idx+len(keyword):]
	} else if strings.HasPrefix(trimmed, ":") {
		// No keyword in this chunk; drop a stray leading colon.
		trimmed = strings.TrimPrefix(trimmed, ":")
	}

	return strings.TrimLeft(trimmed, " ")
}
53 changes: 53 additions & 0 deletions callbacks/agent_final_stream_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package callbacks

import (
"testing"

"github.com/stretchr/testify/require"
)

// TestFilterFinalString verifies that filterFinalString extracts only the
// final-answer text across keyword placements, leading colons, and
// custom keywords.
func TestFilterFinalString(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		keyword  string
		inputStr string
		expected string
	}{
		{
			keyword:  "Final Answer:",
			inputStr: "This is a correct final string.",
			expected: "This is a correct final string.",
		},
		{
			keyword:  "Final Answer:",
			inputStr: " some other text above.\nFinal Answer: This is a correct final string.",
			expected: "This is a correct final string.",
		},
		{
			keyword:  "Final Answer:",
			inputStr: " another text before. Final Answer: This is a correct final string.",
			expected: "This is a correct final string.",
		},
		{
			keyword:  "Final Answer:",
			inputStr: ` : This is a correct final string.`,
			expected: "This is a correct final string.",
		},
		{
			keyword:  "Customed KeyWord_2:",
			inputStr: " some other text above.\nSome Customed KeyWord_2: This is a correct final string.",
			expected: "This is a correct final string.",
		},
		{
			keyword:  "Customed KeyWord_$#@-123:",
			inputStr: " another text before keyword. Some Customed KeyWord_$#@-123: This is a correct final string.",
			expected: "This is a correct final string.",
		},
	}

	for _, tt := range testCases {
		got := filterFinalString(tt.inputStr, tt.keyword)
		require.Equal(t, tt.expected, got)
	}
}
2 changes: 1 addition & 1 deletion chains/api_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,5 +66,5 @@ func TestAPI(t *testing.T) {
if !ok {
t.Fatal("expected answer to be a string")
}
require.True(t, strings.Contains(answer, "temperature"), `result does not contain the keyword 'temperature'`)
require.True(t, strings.Contains(answer, "Munich"), `result does not contain the keyword 'Munich'`)
}
4 changes: 2 additions & 2 deletions chains/chains.go
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,8 @@ func callChain(
return outputValues, nil
}

// Run can be used to execute a chain if the chain only expects one input and one
// string output.
// Run can be used to execute a chain if the chain only expects one input and
// one string output.
func Run(ctx context.Context, c Chain, input any, options ...ChainCallOption) (string, error) {
inputKeys := c.GetInputKeys()
memoryKeys := c.GetMemory().MemoryVariables(ctx)
Expand Down
5 changes: 2 additions & 3 deletions chains/llm_math.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,7 @@ func NewLLMMathChain(llm llms.Model) LLMMathChain {
}
}

// Call gets relevant documents from the retriever and gives them to the combine
// documents chain.
// Call runs the logic of the LLM Math chain and returns the output.
func (c LLMMathChain) Call(ctx context.Context, values map[string]any, options ...ChainCallOption) (map[string]any, error) { // nolint: lll
question, ok := values["question"].(string)
if !ok {
Expand Down Expand Up @@ -71,7 +70,7 @@ func (c LLMMathChain) processLLMResult(llmOutput string) (string, error) {
expression := textMatch[1]
output, err := c.evaluateExpression(expression)
if err != nil {
return "", err
return "", fmt.Errorf("evaluating expression: %w", err)
}
return output, nil
}
Expand Down
33 changes: 33 additions & 0 deletions chains/llm_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (

"github.com/stretchr/testify/require"
"github.com/tmc/langchaingo/callbacks"
"github.com/tmc/langchaingo/llms/googleai"
"github.com/tmc/langchaingo/llms/openai"
"github.com/tmc/langchaingo/prompts"
)
Expand Down Expand Up @@ -55,3 +56,35 @@ func TestLLMChainWithChatPromptTemplate(t *testing.T) {
require.NoError(t, err)
require.Equal(t, "AI: foo\nHuman: boo", result)
}

// TestLLMChainWithGoogleAI runs an LLMChain backed by the Google AI (Gemini)
// model and checks that a simple capital-city prompt yields the expected
// answer. The test is skipped unless GENAI_API_KEY is set, since it performs
// a live API call.
func TestLLMChainWithGoogleAI(t *testing.T) {
	t.Parallel()
	genaiKey := os.Getenv("GENAI_API_KEY")
	if genaiKey == "" {
		t.Skip("GENAI_API_KEY not set")
	}
	model, err := googleai.New(context.Background(), googleai.WithAPIKey(genaiKey))
	require.NoError(t, err)
	model.CallbacksHandler = callbacks.LogHandler{}

	prompt := prompts.NewPromptTemplate(
		"What is the capital of {{.country}}",
		[]string{"country"},
	)

	chain := NewLLMChain(model, prompt)

	// chains tramples over defaults for options, so setting these options
	// explicitly is required until https://github.com/tmc/langchaingo/issues/626
	// is fully resolved.
	result, err := Predict(context.Background(), chain,
		map[string]any{
			"country": "France",
		},
		WithCallback(callbacks.LogHandler{}),
	)
	require.NoError(t, err)
	require.Contains(t, result, "Paris")
}
Loading

0 comments on commit 8fc1ddd

Please sign in to comment.