Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add ReasoningContent field for deepseek-reasoner model #1121

Merged
Merged — 4 commits, Feb 2, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package main

import (
"context"
"fmt"
"log"

"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/openai"
)

// main demonstrates calling the deepseek-reasoner model through the
// langchaingo OpenAI-compatible client, streaming chunks as they arrive
// and then reading the reasoning trace and final answer from separate
// fields of the first content choice.
func main() {
	// Point the OpenAI client at the deepseek-reasoner model.
	llm, err := openai.New(
		openai.WithModel("deepseek-reasoner"),
	)
	if err != nil {
		log.Fatal(err)
	}

	ctx := context.Background()

	// Build the conversation: a system prompt followed by the user question.
	messages := []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeSystem, "You are a helpful assistant that explains complex topics step by step"),
		llms.TextParts(llms.ChatMessageTypeHuman, "Explain how quantum entanglement works and why it's important for quantum computing"),
	}

	// Generate with streaming so both reasoning and answer chunks are
	// printed in real time as the model produces them.
	resp, err := llm.GenerateContent(
		ctx,
		messages,
		llms.WithMaxTokens(2000),
		llms.WithTemperature(0.7),
		llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
			fmt.Print(string(chunk))
			return nil
		}),
	)
	if err != nil {
		log.Fatal(err)
	}

	// After streaming finishes, the aggregated response exposes the
	// reasoning content separately from the final answer.
	if len(resp.Choices) > 0 {
		first := resp.Choices[0]
		fmt.Printf("\n\nReasoning Process:\n%s\n", first.ReasoningContent)
		fmt.Printf("\nFinal Answer:\n%s\n", first.Content)
	}
}
3 changes: 3 additions & 0 deletions llms/chat_messages.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,9 @@ type AIChatMessage struct {

// ToolCalls represents the model choosing to call tools.
ToolCalls []ToolCall `json:"tool_calls,omitempty"`

// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string `json:"reasoning_content,omitempty"`
}

// GetType reports the chat message type of an AIChatMessage, which is
// always ChatMessageTypeAI.
func (m AIChatMessage) GetType() ChatMessageType {
	return ChatMessageTypeAI
}
Expand Down
3 changes: 3 additions & 0 deletions llms/generatecontent.go
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,9 @@ type ContentChoice struct {

// ToolCalls is a list of tool calls the model asks to invoke.
ToolCalls []ToolCall

// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string
}

// TextParts is a helper function to create a MessageContent with a role and a
Expand Down
15 changes: 15 additions & 0 deletions llms/openai/internal/openaiclient/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,9 @@ type ChatMessage struct { //nolint:musttag
// ToolCallID is the ID of the tool call this message is for.
// Only present in tool messages.
ToolCallID string `json:"tool_call_id,omitempty"`

// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string `json:"reasoning_content,omitempty"`
}

func (m ChatMessage) MarshalJSON() ([]byte, error) {
Expand All @@ -181,6 +184,9 @@ func (m ChatMessage) MarshalJSON() ([]byte, error) {
// ToolCallID is the ID of the tool call this message is for.
// Only present in tool messages.
ToolCallID string `json:"tool_call_id,omitempty"`

// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string `json:"reasoning_content,omitempty"`
}(m)
return json.Marshal(msg)
}
Expand All @@ -196,6 +202,9 @@ func (m ChatMessage) MarshalJSON() ([]byte, error) {
// ToolCallID is the ID of the tool call this message is for.
// Only present in tool messages.
ToolCallID string `json:"tool_call_id,omitempty"`

// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string `json:"reasoning_content,omitempty"`
}(m)
return json.Marshal(msg)
}
Expand All @@ -221,6 +230,9 @@ func (m *ChatMessage) UnmarshalJSON(data []byte) error {
// ToolCallID is the ID of the tool call this message is for.
// Only present in tool messages.
ToolCallID string `json:"tool_call_id,omitempty"`

// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string `json:"reasoning_content,omitempty"`
}{}
err := json.Unmarshal(data, &msg)
if err != nil {
Expand Down Expand Up @@ -322,6 +334,8 @@ type StreamedChatResponsePayload struct {
FunctionCall *FunctionCall `json:"function_call,omitempty"`
// ToolCalls is a list of tools that were called in the message.
ToolCalls []*ToolCall `json:"tool_calls,omitempty"`
// This field is only used with the deepseek-reasoner model and represents the reasoning contents of the assistant message before the final answer.
ReasoningContent string `json:"reasoning_content,omitempty"`
} `json:"delta,omitempty"`
FinishReason FinishReason `json:"finish_reason,omitempty"`
} `json:"choices,omitempty"`
Expand Down Expand Up @@ -481,6 +495,7 @@ func combineStreamingChatResponse(
chunk := []byte(choice.Delta.Content)
response.Choices[0].Message.Content += choice.Delta.Content
response.Choices[0].FinishReason = choice.FinishReason
response.Choices[0].Message.ReasoningContent = choice.Delta.ReasoningContent

if choice.Delta.FunctionCall != nil {
chunk = updateFunctionCall(response.Choices[0].Message, choice.Delta.FunctionCall)
Expand Down
40 changes: 40 additions & 0 deletions llms/openai/internal/openaiclient/chat_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,29 @@ func TestParseStreamingChatResponse_FinishReason(t *testing.T) {
assert.Equal(t, FinishReason("stop"), resp.Choices[0].FinishReason)
}

// TestParseStreamingChatResponse_ReasoningContent verifies that a streamed
// delta carrying a reasoning_content field is surfaced on the combined
// response message, alongside the regular content and the finish reason.
func TestParseStreamingChatResponse_ReasoningContent(t *testing.T) {
	t.Parallel()

	const payload = `data: {"choices":[{"index":0,"delta":{"role":"assistant","content":"final answer","reasoning_content":"step-by-step reasoning"},"finish_reason":"stop"}]}`
	httpResp := &http.Response{
		StatusCode: http.StatusOK,
		Body:       io.NopCloser(bytes.NewBufferString(payload)),
	}

	// A streaming func must be set for the parser to take the streaming path;
	// this one discards every chunk.
	chatReq := &ChatRequest{
		StreamingFunc: func(_ context.Context, _ []byte) error { return nil },
	}

	got, err := parseStreamingChatResponse(context.Background(), httpResp, chatReq)

	require.NoError(t, err)
	assert.NotNil(t, got)
	assert.Equal(t, "final answer", got.Choices[0].Message.Content)
	assert.Equal(t, "step-by-step reasoning", got.Choices[0].Message.ReasoningContent)
	assert.Equal(t, FinishReason("stop"), got.Choices[0].FinishReason)
}

func TestChatMessage_MarshalUnmarshal(t *testing.T) {
t.Parallel()
msg := ChatMessage{
Expand All @@ -52,3 +75,20 @@ func TestChatMessage_MarshalUnmarshal(t *testing.T) {
require.NoError(t, err)
require.Equal(t, msg, msg2)
}

// TestChatMessage_MarshalUnmarshal_WithReasoning checks that the
// reasoning_content field round-trips through JSON marshalling and
// unmarshalling without loss, and that it is emitted with the expected key.
func TestChatMessage_MarshalUnmarshal_WithReasoning(t *testing.T) {
	t.Parallel()

	original := ChatMessage{
		Role:             "assistant",
		Content:          "final answer",
		ReasoningContent: "step-by-step reasoning",
	}

	encoded, err := json.Marshal(original)
	require.NoError(t, err)
	require.Equal(t, `{"role":"assistant","content":"final answer","reasoning_content":"step-by-step reasoning"}`, string(encoded))

	var decoded ChatMessage
	require.NoError(t, json.Unmarshal(encoded, &decoded))
	require.Equal(t, original, decoded)
}