Skip to content

Commit

Permalink
misc
Browse files Browse the repository at this point in the history
  • Loading branch information
dclipca committed Jan 18, 2025
1 parent 6f3e0fe commit 0eed711
Show file tree
Hide file tree
Showing 26 changed files with 236 additions and 747 deletions.
77 changes: 75 additions & 2 deletions SpongeEngine.LMStudioSharp.Tests/Common/LmStudioTestBase.cs
Original file line number Diff line number Diff line change
@@ -1,14 +1,22 @@
using Microsoft.Extensions.Logging;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using SpongeEngine.LMStudioSharp.Models.Model;
using WireMock.Server;
using Xunit;
using Xunit.Abstractions;

namespace SpongeEngine.LMStudioSharp.Tests.Common
{
public abstract class LmStudioTestBase : IDisposable
public abstract class LmStudioTestBase : IDisposable, IAsyncLifetime
{
protected readonly LmStudioSharpClient Client;
protected readonly ITestOutputHelper Output;
protected readonly WireMockServer Server;
protected readonly ILogger Logger;
protected readonly string BaseUrl;
protected bool ServerAvailable;
protected Model? DefaultModel;

protected LmStudioTestBase(ITestOutputHelper output)
{
Expand All @@ -17,11 +25,76 @@ protected LmStudioTestBase(ITestOutputHelper output)
Logger = LoggerFactory
.Create(builder => builder.AddXUnit(output))
.CreateLogger(GetType());



Output = output;
Logger = LoggerFactory
.Create(builder => builder.AddXUnit(output))
.CreateLogger(GetType());

Client = new LmStudioSharpClient(new LmStudioClientOptions()
{
BaseUrl = BaseUrl,
HttpClient = new HttpClient
{
BaseAddress = new Uri(TestConfig.NativeApiBaseUrl),
},
JsonSerializerOptions = new JsonSerializerOptions()
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
}
});
}

/// <summary>
/// Synchronous teardown: shuts down the WireMock server started for this fixture.
/// Derived classes overriding this should call <c>base.Dispose()</c> to keep the
/// server shutdown. Note: the HttpClient handed to <c>Client</c> in the constructor
/// is not disposed here — presumably intentional for test-run lifetime, TODO confirm.
/// </summary>
public virtual void Dispose()
{
    Server.Dispose();
    // CA1816: a public virtual Dispose should suppress finalization so derived
    // types with finalizers are not finalized after explicit disposal.
    GC.SuppressFinalize(this);
}

/// <summary>
/// xUnit IAsyncLifetime setup: probes the LM Studio server, records availability
/// in <see cref="ServerAvailable"/>, and caches the first reported model in
/// <see cref="DefaultModel"/>. Throws <c>SkipException</c> (Xunit.SkippableFact)
/// so dependent tests are skipped — not failed — when the server or a model
/// is unavailable.
/// </summary>
public async Task InitializeAsync()
{
    try
    {
        ServerAvailable = await Client.IsAvailableAsync();
        if (!ServerAvailable)
        {
            Output.WriteLine("LM Studio server is not available");
            throw new SkipException("LM Studio server is not available");
        }

        Output.WriteLine("LM Studio server is available");

        ModelsResponse modelsResponse = await Client.ListModelsAsync();
        if (!modelsResponse.Data.Any())
        {
            Output.WriteLine($"modelsResponse: {JsonSerializer.Serialize(modelsResponse)}");
            Output.WriteLine("No models available");
            throw new SkipException("No models available in LM Studio");
        }

        DefaultModel = new Model
        {
            Id = modelsResponse.Data[0].Id,
            Object = modelsResponse.Data[0].Object,
            // Map other properties as needed
        };
        Output.WriteLine($"Found model: {DefaultModel.Id}");
    }
    catch (Exception ex) when (ex is not SkipException)
    {
        Output.WriteLine($"Failed to connect to LM Studio server: {ex.Message}");
        // Carry the underlying failure reason into the skip message so the
        // skipped-test report is diagnosable without digging through output logs.
        throw new SkipException($"Failed to connect to LM Studio server: {ex.Message}");
    }
}

/// <summary>
/// xUnit IAsyncLifetime teardown phase. No asynchronous cleanup is needed
/// (synchronous cleanup lives in <see cref="Dispose"/>), so a completed
/// task is returned directly.
/// </summary>
public Task DisposeAsync() => Task.CompletedTask;
}
}
6 changes: 0 additions & 6 deletions SpongeEngine.LMStudioSharp.Tests/Common/TestConfig.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,5 @@ public static class TestConfig

public static string NativeApiBaseUrl =>
Environment.GetEnvironmentVariable("LMSTUDIO_BASE_URL") ?? $"{DefaultHost}/api";

public static string OpenAiApiBaseUrl =>
Environment.GetEnvironmentVariable("LMSTUDIO_OPENAI_BASE_URL") ?? $"{DefaultHost}/v1";

// Extended timeout for large models
public static int TimeoutSeconds => 120;
}
}

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
using FluentAssertions;
using SpongeEngine.LMStudioSharp.Models.Chat;
using SpongeEngine.LMStudioSharp.Models.Completion;
using SpongeEngine.LMStudioSharp.Tests.Common;
using Xunit;
using Xunit.Abstractions;

namespace SpongeEngine.LMStudioSharp.Tests.Integration.Providers.LmStudioSharpNative
namespace SpongeEngine.LMStudioSharp.Tests.Integration
{
[Trait("Category", "Integration")]
[Trait("API", "Native")]
public class Tests : TestBase
public class Tests : LmStudioTestBase
{
public Tests(ITestOutputHelper output) : base(output) { }

Expand All @@ -29,7 +30,7 @@ public async Task Complete_WithSimplePrompt_ShouldReturnResponse()
};

// Act
var response = await Provider.CompleteAsync(request);
var response = await Client.CompleteAsync(request);

// Assert
response.Should().NotBeNull();
Expand Down Expand Up @@ -58,7 +59,7 @@ public async Task StreamCompletion_ShouldStreamTokens()

try
{
await foreach (var token in Provider.StreamCompletionAsync(request, cts.Token))
await foreach (var token in Client.StreamCompletionAsync(request, cts.Token))
{
tokens.Add(token);
Output.WriteLine($"Received token: {token}");
Expand Down Expand Up @@ -102,7 +103,7 @@ public async Task Complete_WithStopSequence_ShouldReturnResponse()
};

// Act
var response = await Provider.CompleteAsync(request);
var response = await Client.CompleteAsync(request);

// Assert
response.Should().NotBeNull();
Expand Down Expand Up @@ -131,7 +132,7 @@ public async Task Complete_WithDifferentTemperatures_ShouldWork()
};

// Act
var response = await Provider.CompleteAsync(request);
var response = await Client.CompleteAsync(request);

// Assert
response.Should().NotBeNull();
Expand Down Expand Up @@ -162,7 +163,7 @@ public async Task ChatComplete_ShouldReturnResponse()
};

// Act
var response = await Provider.ChatCompleteAsync(request);
var response = await Client.ChatCompleteAsync(request);

// Assert
response.Should().NotBeNull();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,4 @@
<PackageReference Include="Xunit.SkippableFact" Version="1.5.23" />
<ProjectReference Include="..\SpongeEngine.LMStudioSharp\SpongeEngine.LMStudioSharp.csproj" />
</ItemGroup>

<ItemGroup>
<Folder Include="Integration\Providers\" />
</ItemGroup>
</Project>
Loading

0 comments on commit 0eed711

Please sign in to comment.