Commit
implement ITextCompletion and IStreamableTextCompletion, misc
dclipca committed Jan 26, 2025
1 parent 74e5b6b commit 6beeaa1
Showing 8 changed files with 25 additions and 29 deletions.
@@ -9,7 +9,7 @@ namespace SpongeEngine.LMStudioSharp.Tests.Common
public abstract class LmStudioTestBase : IDisposable
{
protected readonly ITestOutputHelper Output;
protected LmStudioSharpClient Client { get; init; } = null!;
protected LMStudioSharpClient Client { get; init; } = null!;
protected Model? DefaultModel { get; set; }

protected LmStudioTestBase(ITestOutputHelper output)
@@ -16,7 +16,7 @@ public class IntegrationTests : LmStudioTestBase
{
public IntegrationTests(ITestOutputHelper output) : base(output)
{
Client = new LmStudioSharpClient(new LmStudioClientOptions()
Client = new LMStudioSharpClient(new LMStudioClientOptions()
{
HttpClient = new HttpClient
{
2 changes: 1 addition & 1 deletion SpongeEngine.LMStudioSharp.Tests/Unit/UnitTests.cs
@@ -18,7 +18,7 @@ public class UnitTests : UnitTestBase
{
public UnitTests(ITestOutputHelper output) : base(output)
{
Client = new LmStudioSharpClient(new LmStudioClientOptions()
Client = new LMStudioSharpClient(new LMStudioClientOptions()
{
HttpClient = new HttpClient
{
6 changes: 6 additions & 0 deletions SpongeEngine.LMStudioSharp/LMStudioClientOptions.cs
@@ -0,0 +1,6 @@
using SpongeEngine.SpongeLLM.Core;

namespace SpongeEngine.LMStudioSharp
{
public class LMStudioClientOptions : LLMClientBaseOptions {}
}
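
For orientation, a minimal sketch of how the new options type is consumed, mirroring the client construction shown in the test diffs above; the BaseAddress value is an assumption (a commonly used LM Studio local default) and is not part of this commit:

using System;
using System.Net.Http;
using SpongeEngine.LMStudioSharp;

// Options carry the HttpClient, as in the test setup above; the address is assumed.
var options = new LMStudioClientOptions
{
    HttpClient = new HttpClient { BaseAddress = new Uri("http://localhost:1234") }
};
var client = new LMStudioSharpClient(options);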
@@ -4,12 +4,12 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using SpongeEngine.LLMSharp.Core;
using SpongeEngine.LLMSharp.Core.Exceptions;
using SpongeEngine.LLMSharp.Core.Interfaces;
using SpongeEngine.LLMSharp.Core.Models;
using SpongeEngine.LMStudioSharp.Models.Completion;
using SpongeEngine.LMStudioSharp.Models.Model;
using SpongeEngine.SpongeLLM.Core;
using SpongeEngine.SpongeLLM.Core.Exceptions;
using SpongeEngine.SpongeLLM.Core.Interfaces;
using SpongeEngine.SpongeLLM.Core.Models;
using ChatRequest = SpongeEngine.LMStudioSharp.Models.Chat.ChatRequest;
using ChatResponse = SpongeEngine.LMStudioSharp.Models.Chat.ChatResponse;
using CompletionRequest = SpongeEngine.LMStudioSharp.Models.Completion.CompletionRequest;
@@ -18,9 +18,9 @@

namespace SpongeEngine.LMStudioSharp
{
public class LmStudioSharpClient : LlmClientBase, ICompletionService
public class LMStudioSharpClient : LLMClientBase, ITextCompletion, IStreamableTextCompletion
{
public override LmStudioClientOptions Options { get; }
public override LMStudioClientOptions Options { get; }

private const string API_VERSION = "v0";
private const string BASE_PATH = $"/api/{API_VERSION}";
@@ -29,7 +29,7 @@ public class LmStudioSharpClient : LlmClientBase, ICompletionService
private const string COMPLETIONS_ENDPOINT = $"{BASE_PATH}/completions";
private const string EMBEDDINGS_ENDPOINT = $"{BASE_PATH}/embeddings";

public LmStudioSharpClient(LmStudioClientOptions options) : base(options)
public LMStudioSharpClient(LMStudioClientOptions options) : base(options)
{
Options = options;
}
@@ -260,10 +260,8 @@ public class StreamChoice
public string? FinishReason { get; set; }
}
}

public async Task<CompletionResult> CompleteAsync(
LLMSharp.Core.Models.CompletionRequest request,
CancellationToken cancellationToken = default)

public async Task<TextCompletionResult> CompleteTextAsync(TextCompletionRequest request, CancellationToken cancellationToken = new CancellationToken())
{
// Convert LLMSharp Core request to LMStudio request
var lmStudioRequest = new Models.Completion.CompletionRequest
@@ -292,11 +290,11 @@ public async Task<CompletionResult> CompleteAsync(
var completionText = response.Choices.FirstOrDefault()?.GetText() ?? string.Empty;

// Convert LMStudio response to LLMSharp Core response
return new CompletionResult
return new TextCompletionResult
{
Text = completionText,
ModelId = response.Model,
TokenUsage = new CompletionTokenUsage
TokenUsage = new TextCompletionTokenUsage
{
PromptTokens = response.Usage.PromptTokens,
CompletionTokens = response.Usage.CompletionTokens ?? 0,
@@ -318,10 +316,8 @@ public async Task<CompletionResult> CompleteAsync(
}
};
}

public async IAsyncEnumerable<CompletionToken> StreamCompletionAsync(
LLMSharp.Core.Models.CompletionRequest request,
[EnumeratorCancellation] CancellationToken cancellationToken = default)

public async IAsyncEnumerable<TextCompletionToken> CompleteTextStreamAsync(TextCompletionRequest request, CancellationToken cancellationToken = new CancellationToken())

Check warning on line 320 in SpongeEngine.LMStudioSharp/LMStudioSharpClient.cs (GitHub Actions / test; the same annotation is reported six times):

Async-iterator 'LMStudioSharpClient.CompleteTextStreamAsync(TextCompletionRequest, CancellationToken)' has one or more parameters of type 'CancellationToken' but none of them is decorated with the 'EnumeratorCancellation' attribute, so the cancellation token parameter from the generated 'IAsyncEnumerable<>.GetAsyncEnumerator' will be unconsumed. (A sketch of the fix the analyzer suggests follows this file's diff.)
{
// Convert LLMSharp Core request to LMStudio request
var lmStudioRequest = new Models.Completion.CompletionRequest
@@ -352,7 +348,7 @@ public async IAsyncEnumerable<CompletionToken> StreamCompletionAsync(
var tokenCount = token.Split(new[] { ' ', '\n' }, StringSplitOptions.RemoveEmptyEntries).Length;
totalTokens += tokenCount;

yield return new CompletionToken
yield return new TextCompletionToken
{
Text = token,
TokenCount = totalTokens,
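
The CI warning attached to CompleteTextStreamAsync above points at the missing [EnumeratorCancellation] attribute. A sketch of the signature the analyzer is asking for, offered as an assumed follow-up rather than something this commit contains:

using System.Runtime.CompilerServices;

// With the attribute, a token passed via WithCancellation(...) on the returned
// IAsyncEnumerable<TextCompletionToken> flows into this parameter instead of being ignored.
public async IAsyncEnumerable<TextCompletionToken> CompleteTextStreamAsync(
    TextCompletionRequest request,
    [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
    // Body elided — see the diff above; yield break keeps the sketch well-formed.
    yield break;
}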
6 changes: 0 additions & 6 deletions SpongeEngine.LMStudioSharp/LmStudioClientOptions.cs

This file was deleted.

@@ -41,7 +41,7 @@
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.1" Condition="'$(TargetFramework)' == 'net7.0'" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.1" Condition="'$(TargetFramework)' == 'net8.0'" />
<PackageReference Include="Polly" Version="8.5.1" />
<PackageReference Include="SpongeEngine.LLMSharp.Core" Version="2.0.1" />
<PackageReference Include="SpongeEngine.SpongeLLM.Core" Version="0.0.3" />
<PackageReference Include="System.Linq.Async" Version="6.0.1" />
</ItemGroup>

@@ -6,7 +6,7 @@

namespace SpongeEngine.LMStudioSharp.Utils
{
public static class LmStudioUtils
public static class LMStudioUtils
{
/// <summary>
/// Creates a default completion request with common settings
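
Taken together, the diffs above give LMStudioSharpClient the ITextCompletion and IStreamableTextCompletion surface. A rough, hypothetical call site, continuing the client sketch earlier; the request property names Prompt and MaxTokens are assumptions, since the request-mapping lines are collapsed in this diff:

// Assumed request shape — only CompleteTextAsync/CompleteTextStreamAsync and the
// Text properties on their results are confirmed by the diff above.
var request = new TextCompletionRequest
{
    Prompt = "Write a haiku about the sea.", // assumed property name
    MaxTokens = 128                          // assumed property name
};

TextCompletionResult result = await client.CompleteTextAsync(request);
Console.WriteLine(result.Text);

// Streaming variant: tokens arrive incrementally as TextCompletionToken values.
await foreach (TextCompletionToken token in client.CompleteTextStreamAsync(request))
{
    Console.Write(token.Text);
}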

0 comments on commit 6beeaa1
