fix StreamChatAsync and add unit and integration tests for it
dclipca committed Feb 8, 2025
1 parent 5259f29 commit ec54024
Showing 5 changed files with 93 additions and 4 deletions.
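For orientation, a minimal consumer of the fixed streaming API might look like the sketch below. ChatRequest, ChatMessage, and StreamChatAsync are taken from this diff; the client construction is assumed (CreateConfiguredClient is a hypothetical helper standing in for the options setup shown in the tests' base classes).

using SpongeEngine.LMStudioSharp;
using SpongeEngine.LMStudioSharp.Models.Chat;

// Sketch only: assumes an already-configured LMStudioSharpClient.
LMStudioSharpClient client = CreateConfiguredClient(); // hypothetical helper

var request = new ChatRequest
{
    Model = "test-model",  // substitute a model loaded in LM Studio
    Temperature = 0.7f,
    Stream = true
};
request.Messages.Add(new ChatMessage { Role = "user", Content = "Hello" });

// With this fix, chat tokens arrive via choices[0].delta.content as well as choices[0].text.
await foreach (var token in client.StreamChatAsync(request))
{
    Console.Write(token);
}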
35 changes: 35 additions & 0 deletions SpongeEngine.LMStudioSharp.Tests/Integration/IntegrationTests.cs
@@ -5,6 +5,8 @@
using SpongeEngine.LMStudioSharp.Models.Chat;
using SpongeEngine.LMStudioSharp.Models.Completion;
using SpongeEngine.LMStudioSharp.Tests.Common;
using WireMock.RequestBuilders;
using WireMock.ResponseBuilders;
using Xunit;
using Xunit.Abstractions;

@@ -31,6 +33,39 @@ public IntegrationTests(ITestOutputHelper output) : base(output)
.CreateLogger(GetType()),
});
}

[SkippableFact]
[Trait("Category", "Integration")]
public async Task StreamChatAsync_WithDeltaProperty_ShouldStreamTokens()
{
// Arrange: Create a chat request with streaming enabled.
var request = new ChatRequest
{
Model = "test-model", // Use your model ID if available.
Temperature = 0.7f,
Stream = true
};
request.Messages.Add(new ChatMessage { Role = "user", Content = "Hello" });

// Act: Consume the streaming response from the LM Studio server.
var receivedTokens = new List<string>();
await foreach (var token in Client.StreamChatAsync(request))
{
receivedTokens.Add(token);
Output.WriteLine($"Received token: {token}");
}

// If no tokens were received, we skip the test
// (for example, if the LM Studio server isn’t returning streaming tokens yet).
if (receivedTokens.Count == 0)
{
throw new SkipException("LM Studio server did not return any streaming tokens. " +
"Ensure that streaming chat responses are enabled and use the delta format.");
}

// Assert: Verify that some tokens were streamed.
receivedTokens.Should().NotBeEmpty("the LM Studio server should stream tokens for chat requests using the delta property");
}

[SkippableFact]
[Trait("Category", "Integration")]
2 changes: 1 addition & 1 deletion SpongeEngine.LMStudioSharp.Tests/SpongeEngine.LMStudioSharp.Tests.csproj
@@ -4,7 +4,7 @@
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<TargetFrameworks>net8.0;net6.0;net7.0</TargetFrameworks>
-<Version>0.3.9.1</Version>
+<Version>0.3.9.2</Version>
</PropertyGroup>

<ItemGroup>
42 changes: 42 additions & 0 deletions SpongeEngine.LMStudioSharp.Tests/Unit/UnitTests.cs
@@ -33,6 +33,48 @@ public UnitTests(ITestOutputHelper output) : base(output)
.CreateLogger(GetType()),
});
}

[Fact]
public async Task StreamChatAsync_WithDeltaProperty_ShouldStreamTokens_Unit()
{
// Arrange: Create a chat request with streaming enabled.
var request = new ChatRequest
{
Model = "test-model",
Messages = new List<ChatMessage>
{
new() { Role = "user", Content = "Hello" }
},
Temperature = 0.7f,
Stream = true
};

// Define the tokens that should be returned by the stream.
var tokens = new[] { "Hello", " there", "!" };

// Create simulated SSE responses using the "delta" property.
var streamResponses = tokens.Select(token => $"data: {{\"choices\": [{{\"delta\": {{\"content\": \"{token}\"}}}}]}}\n\n");
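// Each element above renders as a single SSE event, e.g.:
//   data: {"choices": [{"delta": {"content": "Hello"}}]}
// followed by a blank line (the "\n\n" event terminator).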

// Configure WireMock to simulate the chat completions endpoint with delta-based tokens.
Server
.Given(Request.Create()
.WithPath("/api/v0/chat/completions")
.UsingPost())
.RespondWith(Response.Create()
.WithStatusCode(200)
.WithBody(string.Join("", streamResponses) + "data: [DONE]\n\n")
.WithHeader("Content-Type", "text/event-stream"));

// Act: Consume the streaming response.
var receivedTokens = new List<string>();
await foreach (var token in Client.StreamChatAsync(request))
{
receivedTokens.Add(token);
}

// Assert: Verify that the tokens received match the expected tokens.
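// Note: FluentAssertions' BeEquivalentTo ignores element order by default; use
// BeEquivalentTo(tokens, options => options.WithStrictOrdering()) if order must match.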
receivedTokens.Should().BeEquivalentTo(tokens);
}

[Fact]
public async Task ListModelsAsync_ShouldReturnModels()
16 changes: 14 additions & 2 deletions SpongeEngine.LMStudioSharp/LMStudioSharpClient.cs
@@ -230,7 +230,8 @@ private async IAsyncEnumerable<string> StreamResponseAsync<T>(string endpoint, T
try
{
var streamResponse = JsonSerializer.Deserialize<StreamResponse>(data, Options.JsonSerializerOptions);
-token = streamResponse?.Choices?.FirstOrDefault()?.Text;
+var choice = streamResponse?.Choices?.FirstOrDefault();
+token = choice?.Text ?? choice?.Delta?.Content;
}
catch (JsonException ex)
{
@@ -253,12 +254,23 @@ private class StreamResponse

public class StreamChoice
{
// For plain text completions:
[JsonPropertyName("text")]
-public string Text { get; set; } = string.Empty;
+public string? Text { get; set; }

// For chat completions:
[JsonPropertyName("delta")]
public Delta? Delta { get; set; }

[JsonPropertyName("finish_reason")]
public string? FinishReason { get; set; }
}

public class Delta
{
[JsonPropertyName("content")]
public string? Content { get; set; }
}
}
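// Illustrative stream events handled above (mirroring the unit test's mock):
//   text completion: data: {"choices":[{"text":"Hello"}]}
//   chat (delta):    data: {"choices":[{"delta":{"content":"Hello"}}]}
//   end of stream:   data: [DONE]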

public async Task<TextCompletionResult> CompleteTextAsync(TextCompletionRequest request, CancellationToken cancellationToken = new CancellationToken())
2 changes: 1 addition & 1 deletion SpongeEngine.LMStudioSharp/SpongeEngine.LMStudioSharp.csproj
@@ -8,7 +8,7 @@
<!-- Package Info -->
<PackageId>SpongeEngine.LMStudioSharp</PackageId>
<Title>LMStudioSharp</Title>
-<Version>0.3.9.1</Version>
+<Version>0.3.9.2</Version>
<Authors>Dan Clipca</Authors>
<Company>Sponge Engine</Company>
<Description>C# client for LM Studio native API.</Description>