diff --git a/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs b/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs
index b26022d..61c5662 100644
--- a/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs
+++ b/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs
@@ -9,7 +9,7 @@ namespace OpenAI.ChatGpt.AspNetCore;
 ///
 /// builder.Services.AddHttpClient<ChatGPTFactory>(client =>
 /// {
-///     client.DefaultRequestHeaders.Authorization = builder.Configuration["ChatGPTCredentials:ApiKey"];
+///     client.DefaultRequestHeaders.Authorization = builder.Configuration["OpenAICredentials:ApiKey"];
 /// })
 /// .AddPolicyHandler(GetRetryPolicy())
 /// .AddPolicyHandler(GetCircuitBreakerPolicy());
@@ -19,7 +19,7 @@ namespace OpenAI.ChatGpt.AspNetCore;
 public class ChatGPTFactory : IDisposable
 {
     private readonly OpenAiClient _client;
-    private readonly ChatCompletionsConfig _config;
+    private readonly ChatGPTConfig _config;
     private readonly IChatHistoryStorage _chatHistoryStorage;
     private readonly ITimeProvider _clock;
     private bool _ensureStorageCreatedCalled;
@@ -27,8 +27,8 @@
 
     public ChatGPTFactory(
         IHttpClientFactory httpClientFactory,
-        IOptions<ChatGptCredentials> credentials,
-        IOptions<ChatCompletionsConfig> config,
+        IOptions<OpenAICredentials> credentials,
+        IOptions<ChatGPTConfig> config,
         IChatHistoryStorage chatHistoryStorage,
         ITimeProvider clock)
     {
@@ -42,8 +42,8 @@ public ChatGPTFactory(
     }
 
     internal ChatGPTFactory(
-        IOptions<ChatGptCredentials> credentials,
-        IOptions<ChatCompletionsConfig> config,
+        IOptions<OpenAICredentials> credentials,
+        IOptions<ChatGPTConfig> config,
         IChatHistoryStorage chatHistoryStorage,
         ITimeProvider clock)
     {
@@ -58,18 +58,18 @@ public ChatGPTFactory(
         string apiKey,
         IChatHistoryStorage chatHistoryStorage,
         ITimeProvider? clock = null,
-        ChatCompletionsConfig? config = null)
+        ChatGPTConfig? config = null)
     {
         if (apiKey == null) throw new ArgumentNullException(nameof(apiKey));
         _client = new OpenAiClient(apiKey);
-        _config = config ?? ChatCompletionsConfig.Default;
+        _config = config ?? ChatGPTConfig.Default;
         _chatHistoryStorage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
         _clock = clock ?? new TimeProviderUtc();
     }
 
     private OpenAiClient CreateOpenAiClient(
         IHttpClientFactory httpClientFactory,
-        IOptions<ChatGptCredentials> credentials)
+        IOptions<OpenAICredentials> credentials)
     {
         var httpClient = httpClientFactory.CreateClient(nameof(ChatGPTFactory));
         httpClient.DefaultRequestHeaders.Authorization = credentials.Value.GetAuthHeader();
@@ -77,7 +77,7 @@ private OpenAiClient CreateOpenAiClient(
         return new OpenAiClient(httpClient);
     }
 
-    public static ChatGPTFactory CreateInMemory(string apiKey, ChatCompletionsConfig? config = null)
+    public static ChatGPTFactory CreateInMemory(string apiKey, ChatGPTConfig? config = null)
     {
         if (apiKey == null) throw new ArgumentNullException(nameof(apiKey));
         return new ChatGPTFactory(apiKey, new InMemoryChatHistoryStorage(), new TimeProviderUtc(), config);
@@ -85,7 +85,7 @@ public static ChatGPTFactory CreateInMemory(string apiKey, ChatCompletionsConfig
 
     public async Task<ChatGPT> Create(
         string userId,
-        ChatCompletionsConfig? config = null,
+        ChatGPTConfig? config = null,
         bool ensureStorageCreated = true,
         CancellationToken cancellationToken = default)
     {
@@ -100,12 +100,12 @@ public async Task<ChatGPT> Create(
             _chatHistoryStorage,
             _clock,
             userId,
-            ChatCompletionsConfig.Combine(_config, config)
+            ChatGPTConfig.Combine(_config, config)
         );
     }
 
     public async Task<ChatGPT> Create(
-        ChatCompletionsConfig? config = null,
+        ChatGPTConfig? config = null,
         bool ensureStorageCreated = true,
         CancellationToken cancellationToken = default)
     {
@@ -117,7 +117,7 @@ public async Task<ChatGPT> Create(
             _client,
             _chatHistoryStorage,
             _clock,
-            ChatCompletionsConfig.Combine(_config, config)
+            ChatGPTConfig.Combine(_config, config)
         );
     }
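
Usage sketch (not part of the patch) of how the renamed config type flows through the factory; it assumes Create(...) returns a ChatGPT instance and that StartNewTopic/GetNextMessageResponse behave as shown in the ChatGPT.cs and Chat.cs hunks further below:

    // Sketch only: wiring is hypothetical, types come from the hunks above/below.
    var factory = ChatGPTFactory.CreateInMemory("sk-...", new ChatGPTConfig { MaxTokens = 300 });
    ChatGPT chatGpt = await factory.Create(userId: "user-1");
    await using Chat chat = await chatGpt.StartNewTopic(name: "Demo");
    Console.WriteLine(await chat.GetNextMessageResponse("Hello!"));
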
diff --git a/OpenAI.ChatGpt.AspNetCore/Extensions/ServiceCollectionExtensions.cs b/OpenAI.ChatGpt.AspNetCore/Extensions/ServiceCollectionExtensions.cs
index e49edb8..0888c0c 100644
--- a/OpenAI.ChatGpt.AspNetCore/Extensions/ServiceCollectionExtensions.cs
+++ b/OpenAI.ChatGpt.AspNetCore/Extensions/ServiceCollectionExtensions.cs
@@ -6,14 +6,15 @@ namespace OpenAI.ChatGpt.AspNetCore.Extensions;
 
 public static class ServiceCollectionExtensions
 {
-    public const string CredentialsConfigSectionPathDefault = "ChatGptCredentials";
-    public const string CompletionsConfigSectionPathDefault = "ChatCompletionsConfig";
+    public const string CredentialsConfigSectionPathDefault = "OpenAICredentials";
+    // ReSharper disable once InconsistentNaming
+    public const string CchatGPTConfigSectionPathDefault = "ChatGPTConfig";
 
     public static IServiceCollection AddChatGptInMemoryIntegration(
         this IServiceCollection services,
         bool injectInMemoryChat = true,
         string credentialsConfigSectionPath = CredentialsConfigSectionPathDefault,
-        string completionsConfigSectionPath = CompletionsConfigSectionPathDefault)
+        string completionsConfigSectionPath = CchatGPTConfigSectionPathDefault)
     {
         ArgumentNullException.ThrowIfNull(services);
         if (string.IsNullOrWhiteSpace(credentialsConfigSectionPath))
@@ -60,7 +61,7 @@ private static Chat CreateChatGptChat(IServiceProvider provider)
     public static IServiceCollection AddChatGptIntegrationCore(
         this IServiceCollection services,
         string credentialsConfigSectionPath = CredentialsConfigSectionPathDefault,
-        string completionsConfigSectionPath = CompletionsConfigSectionPathDefault)
+        string completionsConfigSectionPath = CchatGPTConfigSectionPathDefault)
     {
         ArgumentNullException.ThrowIfNull(services);
         if (string.IsNullOrWhiteSpace(credentialsConfigSectionPath))
@@ -74,11 +75,11 @@ public static IServiceCollection AddChatGptIntegrationCore(
             nameof(completionsConfigSectionPath));
         }
 
-        services.AddOptions<ChatGptCredentials>()
+        services.AddOptions<OpenAICredentials>()
             .BindConfiguration(credentialsConfigSectionPath)
             .ValidateDataAnnotations()
             .ValidateOnStart();
-        services.AddOptions<ChatCompletionsConfig>()
+        services.AddOptions<ChatGPTConfig>()
            .BindConfiguration(completionsConfigSectionPath)
            .Configure(_ => { }) //optional
            .ValidateDataAnnotations()
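
With the section constants renamed, configuration now binds from "OpenAICredentials" and "ChatGPTConfig". A registration sketch (not part of the patch), using in-memory configuration in place of appsettings.json; the hosting code around it is assumed:

    using OpenAI.ChatGpt.AspNetCore.Extensions;

    var builder = WebApplication.CreateBuilder(args);
    builder.Configuration.AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["OpenAICredentials:ApiKey"] = "sk-...", // previously "ChatGptCredentials:ApiKey"
        ["ChatGPTConfig:MaxTokens"] = "300"      // previously "ChatCompletionsConfig:MaxTokens"
    });
    builder.Services.AddChatGptInMemoryIntegration();
    var app = builder.Build();
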
diff --git a/OpenAI.ChatGpt.AspNetCore/Models/ChatGptCredentials.cs b/OpenAI.ChatGpt.AspNetCore/Models/OpenAICredentials.cs
similarity index 87%
rename from OpenAI.ChatGpt.AspNetCore/Models/ChatGptCredentials.cs
rename to OpenAI.ChatGpt.AspNetCore/Models/OpenAICredentials.cs
index 9edff8e..3346ccb 100644
--- a/OpenAI.ChatGpt.AspNetCore/Models/ChatGptCredentials.cs
+++ b/OpenAI.ChatGpt.AspNetCore/Models/OpenAICredentials.cs
@@ -5,7 +5,8 @@
 namespace OpenAI.ChatGpt.AspNetCore.Models;
 
-public class ChatGptCredentials
+// ReSharper disable once InconsistentNaming
+public class OpenAICredentials
 {
     ///
     /// OpenAI API key. Can be issued here: https://platform.openai.com/account/api-keys
diff --git a/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj b/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
index dcbb37b..0ba37c9 100644
--- a/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
+++ b/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
@@ -8,7 +8,7 @@
     OpenAI.ChatGPT.AspNetCore
     https://github.com/rodion-m/ChatGPT_API_dotnet
     OpenAI ChatGPT integration for .NET with DI
-    2.1.0
+    2.2.0
     OpenAI Chat Completions API (ChatGPT) integration with easy DI supporting (Microsoft.Extensions.DependencyInjection). It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.
     https://github.com/rodion-m/ChatGPT_API_dotnet
     net6.0;net7.0
diff --git a/OpenAI.ChatGpt.EntityFrameworkCore/Extensions/ServiceCollectionExtensions.cs b/OpenAI.ChatGpt.EntityFrameworkCore/Extensions/ServiceCollectionExtensions.cs
index dc400c8..b7e95ad 100644
--- a/OpenAI.ChatGpt.EntityFrameworkCore/Extensions/ServiceCollectionExtensions.cs
+++ b/OpenAI.ChatGpt.EntityFrameworkCore/Extensions/ServiceCollectionExtensions.cs
@@ -13,7 +13,7 @@ public static IServiceCollection AddChatGptEntityFrameworkIntegration(
         this IServiceCollection services,
         Action optionsAction,
         string credentialsConfigSectionPath = CredentialsConfigSectionPathDefault,
-        string completionsConfigSectionPath = CompletionsConfigSectionPathDefault)
+        string completionsConfigSectionPath = CchatGPTConfigSectionPathDefault)
     {
         ArgumentNullException.ThrowIfNull(services);
         ArgumentNullException.ThrowIfNull(optionsAction);
diff --git a/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj b/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj
index 865684a..3cdc37d 100644
--- a/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj
+++ b/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj
@@ -9,7 +9,7 @@
     OpenAI.ChatGPT.EntityFrameworkCore
     https://github.com/rodion-m/ChatGPT_API_dotnet
     OpenAI ChatGPT integration for .NET with EF Core storage
-    2.1.0
+    2.2.0
     OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core supporting. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.
     https://github.com/rodion-m/ChatGPT_API_dotnet
     net6.0;net7.0
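
A registration sketch for the EF Core storage (not part of the patch). Assumptions: optionsAction is the usual DbContext options delegate and a SQLite provider is referenced; neither is spelled out in the hunk above. It continues the builder from the previous sketch:

    using Microsoft.EntityFrameworkCore;
    using OpenAI.ChatGpt.EntityFrameworkCore.Extensions;

    builder.Services.AddChatGptEntityFrameworkIntegration(
        options => options.UseSqlite("Data Source=chats.db"), // assumed delegate type
        credentialsConfigSectionPath: "OpenAICredentials",
        completionsConfigSectionPath: "ChatGPTConfig");
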
diff --git a/OpenAI.ChatGpt/Chat.cs b/OpenAI.ChatGpt/Chat.cs
index 56829d4..5872424 100644
--- a/OpenAI.ChatGpt/Chat.cs
+++ b/OpenAI.ChatGpt/Chat.cs
@@ -1,8 +1,10 @@
-using System.Runtime.CompilerServices;
+using System.Diagnostics.CodeAnalysis;
+using System.Runtime.CompilerServices;
 using System.Text;
 using OpenAI.ChatGpt.Interfaces;
 using OpenAI.ChatGpt.Internal;
 using OpenAI.ChatGpt.Models;
+using OpenAI.ChatGpt.Models.ChatCompletion;
 using OpenAI.ChatGpt.Models.ChatCompletion.Messaging;
 
 namespace OpenAI.ChatGpt;
@@ -11,6 +13,7 @@ namespace OpenAI.ChatGpt;
 /// Used for communication between a user and the assistant (ChatGPT).
 ///
 /// Not thread-safe. Use one instance per user.
+[SuppressMessage("ReSharper", "UnusedAutoPropertyAccessor.Global")]
 public class Chat : IDisposable, IAsyncDisposable
 {
     public Topic Topic { get; }
@@ -18,6 +21,8 @@ public class Chat : IDisposable, IAsyncDisposable
     public Guid TopicId => Topic.Id;
     public bool IsWriting { get; private set; }
     public bool IsCancelled => _cts?.IsCancellationRequested ?? false;
+
+    public ChatCompletionResponse? LastResponse { get; private set; }
 
     private readonly IChatHistoryStorage _chatHistoryStorage;
     private readonly ITimeProvider _clock;
@@ -44,6 +49,26 @@ internal Chat(
         _clearOnDisposal = clearOnDisposal;
     }
 
+    public void Dispose()
+    {
+        _cts?.Dispose();
+        if (_clearOnDisposal)
+        {
+            // TODO: log warning about sync disposal
+            _chatHistoryStorage.DeleteTopic(UserId, TopicId, default)
+                .GetAwaiter().GetResult();
+        }
+    }
+
+    public async ValueTask DisposeAsync()
+    {
+        _cts?.Dispose();
+        if (_clearOnDisposal)
+        {
+            await _chatHistoryStorage.DeleteTopic(UserId, TopicId, default);
+        }
+    }
+
     public Task<string> GetNextMessageResponse(
         string message,
         CancellationToken cancellationToken = default)
@@ -57,26 +82,29 @@ private async Task<string> GetNextMessageResponse(
         UserOrSystemMessage message,
         CancellationToken cancellationToken)
     {
+        _cts?.Dispose();
         _cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
         _cts.Token.Register(() => IsWriting = false);
 
         var history = await LoadHistory(cancellationToken);
         var messages = history.Append(message);
 
-        IsWriting = true;
-        var response = await _client.GetChatCompletions(
+        IsWriting = true; //TODO set false on exception
+        var response = await _client.GetChatCompletionsRaw(
             messages,
             user: Topic.Config.PassUserIdToOpenAiRequests is true ? UserId : null,
             requestModifier: Topic.Config.ModifyRequest,
             cancellationToken: _cts.Token
         );
+        SetLastResponse(response);
+        var assistantMessage = response.GetMessageContent();
+
         await _chatHistoryStorage.SaveMessages(
-            UserId, TopicId, message, response, _clock.GetCurrentTime(), _cts.Token);
+            UserId, TopicId, message, assistantMessage, _clock.GetCurrentTime(), _cts.Token);
         IsWriting = false;
         _isNew = false;
 
-        return response;
+        return assistantMessage;
     }
 
     public IAsyncEnumerable<string> StreamNextMessageResponse(
@@ -95,6 +123,7 @@ private async IAsyncEnumerable<string> StreamNextMessageResponse(
         [EnumeratorCancellation] CancellationToken cancellationToken)
     {
         var originalCancellationToken = cancellationToken;
+        _cts?.Dispose();
         _cts = CancellationTokenSource.CreateLinkedTokenSource(originalCancellationToken);
         cancellationToken = _cts.Token;
         cancellationToken.Register(() => IsWriting = false);
@@ -102,7 +131,7 @@ private async IAsyncEnumerable<string> StreamNextMessageResponse(
         var history = await LoadHistory(cancellationToken);
         var messages = history.Append(message);
         var sb = new StringBuilder();
-        IsWriting = true;
+        IsWriting = true; //TODO set false on exception
         var stream = _client.StreamChatCompletions(
             messages,
             user: Topic.Config.PassUserIdToOpenAiRequests is true ? UserId : null,
@@ -118,7 +147,12 @@ private async IAsyncEnumerable<string> StreamNextMessageResponse(
         }
 
         if(cancellationToken.IsCancellationRequested && !throwOnCancellation)
+        {
+            IsWriting = false;
             yield break;
+        }
+
+        SetLastResponse(null);
 
         await _chatHistoryStorage.SaveMessages(
             UserId, TopicId, message, sb.ToString(), _clock.GetCurrentTime(), cancellationToken);
@@ -131,29 +165,22 @@ private async Task> LoadHistory(CancellationT
         if (_isNew) return Enumerable.Empty();
         return await _chatHistoryStorage.GetMessages(UserId, TopicId, cancellationToken);
     }
-
-    public void Stop()
+
+
+    /// Returns topic messages history.
+    public Task> GetMessages(
+        CancellationToken cancellationToken = default)
     {
-        _cts?.Cancel();
+        return _chatHistoryStorage.GetMessages(UserId, TopicId, cancellationToken);
     }
 
-    public void Dispose()
+    private void SetLastResponse(ChatCompletionResponse? response)
     {
-        _cts?.Dispose();
-        if (_clearOnDisposal)
-        {
-            // TODO: log warning about sync disposal
-            _chatHistoryStorage.DeleteTopic(UserId, TopicId, default)
-                .GetAwaiter().GetResult();
-        }
+        LastResponse = response;
     }
 
-    public async ValueTask DisposeAsync()
+    public void Stop()
     {
-        _cts?.Dispose();
-        if (_clearOnDisposal)
-        {
-            await _chatHistoryStorage.DeleteTopic(UserId, TopicId, default);
-        }
+        _cts?.Cancel();
     }
 }
\ No newline at end of file
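
A usage sketch for the new LastResponse property (not part of the patch); it assumes CreateInMemoryChat and GetNextMessageResponse behave as shown in the ChatGPT.cs hunks below:

    // Sketch: after a completed non-streamed exchange, the raw OpenAI payload is kept on the chat.
    await using Chat chat = await ChatGPT.CreateInMemoryChat("sk-...", new ChatGPTConfig { MaxTokens = 100 });
    string answer = await chat.GetNextMessageResponse("Give me one fun fact.");
    ChatCompletionResponse? raw = chat.LastResponse; // null until a non-streamed response completes; streaming resets it to null
    Console.WriteLine(answer);
    Console.WriteLine(raw?.GetMessageContent()); // same text, read back from the raw response (which also carries usage data)
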
diff --git a/OpenAI.ChatGpt/ChatGPT.cs b/OpenAI.ChatGpt/ChatGPT.cs
index cfb04e7..383a540 100644
--- a/OpenAI.ChatGpt/ChatGPT.cs
+++ b/OpenAI.ChatGpt/ChatGPT.cs
@@ -13,7 +13,7 @@ public class ChatGPT : IDisposable
     private readonly string _userId;
     private readonly IChatHistoryStorage _chatHistoryStorage;
     private readonly ITimeProvider _clock;
-    private readonly ChatCompletionsConfig? _config;
+    private readonly ChatGPTConfig? _config;
     private readonly OpenAiClient _client;
 
     private Chat? _currentChat;
@@ -25,7 +25,7 @@ public ChatGPT(
         IChatHistoryStorage chatHistoryStorage,
         ITimeProvider clock,
         string userId,
-        ChatCompletionsConfig? config)
+        ChatGPTConfig? config)
     {
         _client = client ?? throw new ArgumentNullException(nameof(client));
         _userId = userId ?? throw new ArgumentNullException(nameof(userId));
@@ -41,7 +41,7 @@ public ChatGPT(
         OpenAiClient client,
         IChatHistoryStorage chatHistoryStorage,
         ITimeProvider clock,
-        ChatCompletionsConfig? config)
+        ChatGPTConfig? config)
     {
         _client = client ?? throw new ArgumentNullException(nameof(client));
         _chatHistoryStorage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
@@ -55,7 +55,7 @@ public ChatGPT(
     ///
     public static Task<Chat> CreateInMemoryChat(
         string apiKey,
-        ChatCompletionsConfig? config = null,
+        ChatGPTConfig? config = null,
         UserOrSystemMessage? initialDialog = null,
         ITimeProvider? clock = null)
     {
@@ -85,12 +85,12 @@ public async Task<Chat> ContinueOrStartNewTopic(
     /// Starts a new topic.
     public async Task<Chat> StartNewTopic(
         string? name = null,
-        ChatCompletionsConfig? config = null,
+        ChatGPTConfig? config = null,
         UserOrSystemMessage? initialDialog = null,
         bool clearOnDisposal = false,
         CancellationToken cancellationToken = default)
     {
-        config = ChatCompletionsConfig.CombineOrDefault(_config, config);
+        config = ChatGPTConfig.CombineOrDefault(_config, config);
         var topic = new Topic(_chatHistoryStorage.NewTopicId(), _userId, name, _clock.GetCurrentTime(), config);
         await _chatHistoryStorage.AddTopic(topic, cancellationToken);
         initialDialog ??= config.GetInitialDialogOrNull();
diff --git a/OpenAI.ChatGpt/Models/ChatCompletion/ChatCompletionResponse.cs b/OpenAI.ChatGpt/Models/ChatCompletion/ChatCompletionResponse.cs
index 559e2d9..200b43f 100644
--- a/OpenAI.ChatGpt/Models/ChatCompletion/ChatCompletionResponse.cs
+++ b/OpenAI.ChatGpt/Models/ChatCompletion/ChatCompletionResponse.cs
@@ -66,4 +66,17 @@ public class _Usage
         [JsonPropertyName("total_tokens")]
         public long TotalTokens { get; set; }
     }
+
+    public string GetMessageContent()
+    {
+        if(Choices.Length == 0)
+        {
+            throw new InvalidOperationException("Choices is empty");
+        }
+        if (Choices[0].Message == null)
+        {
+            throw new InvalidOperationException("Message is null");
+        }
+        return Choices[0].Message!.Content;
+    }
 }
diff --git a/OpenAI.ChatGpt/Models/ChatCompletionsConfig.cs b/OpenAI.ChatGpt/Models/ChatGPTConfig.cs
similarity index 85%
rename from OpenAI.ChatGpt/Models/ChatCompletionsConfig.cs
rename to OpenAI.ChatGpt/Models/ChatGPTConfig.cs
index 1e29aa6..77c6cdb 100644
--- a/OpenAI.ChatGpt/Models/ChatCompletionsConfig.cs
+++ b/OpenAI.ChatGpt/Models/ChatGPTConfig.cs
@@ -1,11 +1,13 @@
+using System.ComponentModel.DataAnnotations;
 using OpenAI.ChatGpt.Models.ChatCompletion;
 using OpenAI.ChatGpt.Models.ChatCompletion.Messaging;
 
 namespace OpenAI.ChatGpt.Models;
 
-public class ChatCompletionsConfig
+// ReSharper disable once InconsistentNaming
+public class ChatGPTConfig
 {
-    public static ChatCompletionsConfig Default => new()
+    public static ChatGPTConfig Default => new()
     {
         PassUserIdToOpenAiRequests = true
     };
@@ -53,6 +55,7 @@ public string? Model
     /// while lower values like 0.2 will make it more focused and deterministic.
     /// Predefined values:
     ///
+    [Range(ChatCompletionTemperatures.Minimum, ChatCompletionTemperatures.Maximum)]
     public float? Temperature
     {
         get => _temperature;
@@ -84,17 +87,17 @@ internal void ModifyRequest(ChatCompletionRequest request)
     }
 
     ///
-    /// Merges two s with respect to .
+    /// Merges two s with respect to .
     ///
-    public static ChatCompletionsConfig? Combine(
-        ChatCompletionsConfig? baseConfig,
-        ChatCompletionsConfig? config)
+    public static ChatGPTConfig? Combine(
+        ChatGPTConfig? baseConfig,
+        ChatGPTConfig? config)
     {
         if (baseConfig is null && config is null) return null;
         if (baseConfig is null) return config;
         if (config is null) return baseConfig;
 
-        var result = new ChatCompletionsConfig()
+        var result = new ChatGPTConfig()
         {
             _model = config._model ?? baseConfig._model,
             _maxTokens = config._maxTokens ?? baseConfig._maxTokens,
@@ -107,8 +110,8 @@ internal void ModifyRequest(ChatCompletionRequest request)
         return result;
     }
 
-    public static ChatCompletionsConfig CombineOrDefault(
-        ChatCompletionsConfig? baseConfig, ChatCompletionsConfig? config)
+    public static ChatGPTConfig CombineOrDefault(
+        ChatGPTConfig? baseConfig, ChatGPTConfig? config)
     {
         return Combine(baseConfig, config) ?? Default;
    }
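
A sketch of the merge semantics (not part of the patch): per the Combine body above, values set on config win over baseConfig, and CombineOrDefault falls back to ChatGPTConfig.Default.

    var baseConfig = new ChatGPTConfig { MaxTokens = 200, Temperature = 0.2f };
    var perTopic   = new ChatGPTConfig { Temperature = 0.9f };

    ChatGPTConfig? merged = ChatGPTConfig.Combine(baseConfig, perTopic);
    // merged: MaxTokens = 200 (from baseConfig), Temperature = 0.9f (perTopic overrides)

    ChatGPTConfig fallback = ChatGPTConfig.CombineOrDefault(null, null);
    // fallback is ChatGPTConfig.Default, i.e. PassUserIdToOpenAiRequests = true
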
diff --git a/OpenAI.ChatGpt/Models/Topic.cs b/OpenAI.ChatGpt/Models/Topic.cs
index 12bba62..4eeb9d5 100644
--- a/OpenAI.ChatGpt/Models/Topic.cs
+++ b/OpenAI.ChatGpt/Models/Topic.cs
@@ -12,7 +12,7 @@ internal Topic(
         string userId,
         string? name,
         DateTimeOffset createdAt,
-        ChatCompletionsConfig config)
+        ChatGPTConfig config)
     {
         Id = id;
         UserId = userId ?? throw new ArgumentNullException(nameof(userId));
@@ -25,5 +25,5 @@ internal Topic(
     public string UserId { get; set; }
     public string? Name { get; set; }
     public DateTimeOffset CreatedAt { get; set; }
-    public ChatCompletionsConfig Config { get; set; }
+    public ChatGPTConfig Config { get; set; }
 }
\ No newline at end of file
diff --git a/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj b/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
index 48226c3..b90345d 100644
--- a/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
+++ b/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
@@ -5,12 +5,12 @@
     enable
     11
     Rodion Mostovoi
-    OpenAI ChatGPT Integration for .NET
+    OpenAI ChatGPT integration for .NET
     true
     OpenAI.ChatGPT
     https://github.com/rodion-m/ChatGPT_API_dotnet
     OpenAI ChatGPT integration for .NET
-    2.1.0
+    2.2.0
     .NET integration for ChatGPT with streaming responses supporting (like ChatGPT) via async streams.
     https://github.com/rodion-m/ChatGPT_API_dotnet
     MIT
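
The OpenAIClient.cs hunks below introduce a raw-response API (GetChatCompletionsRaw). A usage sketch, assuming the messages parameter takes the same chat-message collection used by GetChatCompletions (ChatCompletionMessage here) and that the method returns the full ChatCompletionResponse; BuildMessages is a hypothetical helper, not part of the library:

    var client = new OpenAiClient("sk-...");
    IEnumerable<ChatCompletionMessage> messages = BuildMessages(); // hypothetical: however you already assemble the dialog
    ChatCompletionResponse response = await client.GetChatCompletionsRaw(messages, maxTokens: 80);
    Console.WriteLine(response.GetMessageContent()); // assistant text, with the empty-choices guard shown above
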
diff --git a/OpenAI.ChatGpt/OpenAIClient.cs b/OpenAI.ChatGpt/OpenAIClient.cs
index 4ad5f8a..9695b26 100644
--- a/OpenAI.ChatGpt/OpenAIClient.cs
+++ b/OpenAI.ChatGpt/OpenAIClient.cs
@@ -108,8 +108,8 @@ public async Task<string> GetChatCompletions(
             false,
             requestModifier
         );
-        var res = await GetChatCompletions(request, cancellationToken);
-        return res.Choices[0].Message!.Content;
+        var response = await GetChatCompletionsRaw(request, cancellationToken);
+        return response.Choices[0].Message!.Content;
     }
 
     public async Task<string> GetChatCompletions(
@@ -131,11 +131,34 @@ public async Task<string> GetChatCompletions(
             false,
             requestModifier
         );
-        var res = await GetChatCompletions(request, cancellationToken);
-        return res.Choices[0].Message!.Content;
+        var response = await GetChatCompletionsRaw(request, cancellationToken);
+        return response.GetMessageContent();
+    }
+
+    public async Task<ChatCompletionResponse> GetChatCompletionsRaw(
+        IEnumerable<ChatCompletionMessage> messages,
+        int maxTokens = ChatCompletionRequest.MaxTokensDefault,
+        string model = ChatCompletionModels.Default,
+        float temperature = ChatCompletionTemperatures.Default,
+        string? user = null,
+        Action<ChatCompletionRequest>? requestModifier = null,
+        CancellationToken cancellationToken = default)
+    {
+        if (messages == null) throw new ArgumentNullException(nameof(messages));
+        if (model == null) throw new ArgumentNullException(nameof(model));
+        var request = CreateChatCompletionRequest(messages,
+            maxTokens,
+            model,
+            temperature,
+            user,
+            false,
+            requestModifier
+        );
+        var response = await GetChatCompletionsRaw(request, cancellationToken);
+        return response;
     }
 
-    internal async Task<ChatCompletionResponse> GetChatCompletions(
+    internal async Task<ChatCompletionResponse> GetChatCompletionsRaw(
         ChatCompletionRequest request,
         CancellationToken cancellationToken = default)
     {
@@ -146,8 +169,7 @@ internal async Task<ChatCompletionResponse> GetChatCompletions(
             cancellationToken: cancellationToken,
             options: _nullIgnoreSerializerOptions
         );
-        var responseContent = await response.Content
-            .ReadAsStringAsync(cancellationToken);
+        var responseContent = await response.Content.ReadAsStringAsync(cancellationToken);
 
         if (!response.IsSuccessStatusCode)
         {
@@ -165,16 +187,16 @@ internal async Task<ChatCompletionResponse> GetChatCompletions(
     /// The length of the response
     /// One of
     ///
-    /// What sampling temperature to use, between 0 and 2.
-    /// Higher values like 0.8 will make the output more random,
-    /// while lower values like 0.2 will make it more focused and deterministic.
+    /// What sampling temperature to use, between 0 and 2.
+    /// Higher values like 0.8 will make the output more random,
+    /// while lower values like 0.2 will make it more focused and deterministic.
     ///
     ///
-    /// A unique identifier representing your end-user, which can help OpenAI to monitor
-    /// and detect abuse.
+    /// A unique identifier representing your end-user, which can help OpenAI to monitor
+    /// and detect abuse.
     ///
     /// A modifier of the raw request. Allows to specify any custom properties.
-    /// Cancellation token
+    /// Cancellation token.
     /// Chunks of ChatGPT's response, one by one.
     public IAsyncEnumerable<string> StreamChatCompletions(
         IEnumerable<ChatCompletionMessage> messages,
diff --git a/samples/ChatGpt.ConsoleExample/Program.cs b/samples/ChatGpt.ConsoleExample/Program.cs
index 2ad1959..076e9db 100644
--- a/samples/ChatGpt.ConsoleExample/Program.cs
+++ b/samples/ChatGpt.ConsoleExample/Program.cs
@@ -7,7 +7,7 @@
 Console.WriteLine("Welcome to ChatGPT Console!");
 var apiKey = LoadApiKey();
-var config = new ChatCompletionsConfig() { MaxTokens = 300 };
+var config = new ChatGPTConfig() { MaxTokens = 300 };
 await using Chat chat = await ChatGPT.CreateInMemoryChat(apiKey, config);
 Console.Write("User: ");
diff --git a/samples/ChatGpt.SpectreConsoleExample/Program.cs b/samples/ChatGpt.SpectreConsoleExample/Program.cs
index dddff5d..fd8d384 100644
--- a/samples/ChatGpt.SpectreConsoleExample/Program.cs
+++ b/samples/ChatGpt.SpectreConsoleExample/Program.cs
@@ -12,7 +12,7 @@
 var apiKey = LoadApiKey();
 await using Chat chat = await ChatGPT.CreateInMemoryChat(
     apiKey,
-    config: new ChatCompletionsConfig() { MaxTokens = 200 },
+    config: new ChatGPTConfig() { MaxTokens = 200 },
     initialDialog: Dialog.StartAsSystem($"You are helpful assistant for a person named {name}.")
 );
 SetupCancellation(chat);
diff --git a/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs b/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs
index 1025bbe..7aa2528 100644
--- a/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs
+++ b/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs
@@ -37,7 +37,7 @@ await FluentActions.Invoking(
     private static async Task<Chat> CreateInMemoryChat()
     {
         return await ChatGPT.CreateInMemoryChat(Helpers.GetKeyFromEnvironment("OPENAI_API_KEY"),
-            new ChatCompletionsConfig()
+            new ChatGPTConfig()
             {
                 MaxTokens = 100
             });
diff --git a/tests/OpenAI.ChatGpt.UnitTests/DependencyInjectionTests/ChatGptServiceCollectionExtensionsTests.cs b/tests/OpenAI.ChatGpt.UnitTests/DependencyInjectionTests/ChatGptServiceCollectionExtensionsTests.cs
index e54fca8..c0b7374 100644
--- a/tests/OpenAI.ChatGpt.UnitTests/DependencyInjectionTests/ChatGptServiceCollectionExtensionsTests.cs
+++ b/tests/OpenAI.ChatGpt.UnitTests/DependencyInjectionTests/ChatGptServiceCollectionExtensionsTests.cs
@@ -23,8 +23,8 @@ public void AddChatGptCoreIntegration_added_expected_services()
         services.Count.Should().BeGreaterThan(initialServiceCount);
 
         using var provider = services.BuildServiceProvider();
-        provider.GetRequiredService<IOptions<ChatGptCredentials>>();
-        provider.GetRequiredService<IOptions<ChatCompletionsConfig>>();
+        provider.GetRequiredService<IOptions<OpenAICredentials>>();
+        provider.GetRequiredService<IOptions<ChatGPTConfig>>();
         provider.GetRequiredService();
         provider.GetRequiredService();
 
@@ -100,8 +100,8 @@ IConfiguration CreateConfiguration()
         var builder = new ConfigurationBuilder()
             .AddInMemoryCollection(new Dictionary()
             {
-                { $"{CredentialsConfigSectionPathDefault}:{nameof(ChatGptCredentials.ApiKey)}", "test-api-key" },
-                { CompletionsConfigSectionPathDefault, ""},
+                { $"{CredentialsConfigSectionPathDefault}:{nameof(OpenAICredentials.ApiKey)}", "test-api-key" },
+                { CchatGPTConfigSectionPathDefault, ""},
             });
         return builder.Build();
     }
diff --git a/tests/OpenAI.ChatGpt.UnitTests/TopicStorage/AbstractTopicStorageTests.cs b/tests/OpenAI.ChatGpt.UnitTests/TopicStorage/AbstractTopicStorageTests.cs
index dc7c5ce..c566d7c 100644
--- a/tests/OpenAI.ChatGpt.UnitTests/TopicStorage/AbstractTopicStorageTests.cs
+++ b/tests/OpenAI.ChatGpt.UnitTests/TopicStorage/AbstractTopicStorageTests.cs
@@ -13,7 +13,7 @@ public AbstractTopicStorageTests(ITopicStorage topicStorage)
     public async Task Add_topic_for_user_and_retrieve_it()
     {
         var userId = "user-1";
-        var topic = new Topic(Guid.NewGuid(), userId, "New Topic", DateTimeOffset.UtcNow, new ChatCompletionsConfig());
+        var topic = new Topic(Guid.NewGuid(), userId, "New Topic", DateTimeOffset.UtcNow, new ChatGPTConfig());
         await _topicStorage.AddTopic(topic, CancellationToken.None);
 
         var retrievedTopic = await _topicStorage.GetTopic(userId, topic.Id, CancellationToken.None);
@@ -34,7 +34,7 @@ public async Task Get_most_recent_topic_for_user_with_no_topics_returns_null()
     public async Task Edit_topic_name_and_verify_changes()
     {
         var userId = "user-3";
-        var topic = new Topic(Guid.NewGuid(), userId, "Old Name", DateTimeOffset.UtcNow, new ChatCompletionsConfig());
+        var topic = new Topic(Guid.NewGuid(), userId, "Old Name", DateTimeOffset.UtcNow, new ChatGPTConfig());
         await _topicStorage.AddTopic(topic, CancellationToken.None);
 
         var newName = "New Name";
@@ -48,7 +48,7 @@ public async Task Edit_topic_name_and_verify_changes()
     public async Task Delete_topic_and_verify_removal()
     {
         var userId = "user-4";
-        var topic = new Topic(Guid.NewGuid(), userId, "Topic to delete", DateTimeOffset.UtcNow, new ChatCompletionsConfig());
+        var topic = new Topic(Guid.NewGuid(), userId, "Topic to delete", DateTimeOffset.UtcNow, new ChatGPTConfig());
         await _topicStorage.AddTopic(topic, CancellationToken.None);
 
         var deletionResult = await _topicStorage.DeleteTopic(userId, topic.Id, CancellationToken.None);
@@ -62,9 +62,9 @@ await FluentActions.Invoking(() => _topicStorage.GetTopic(userId, topic.Id, Canc
     public async Task Retrieve_most_recent_topic_for_user_with_multiple_topics()
    {
         var userId = "user-5";
-        var topic1 = new Topic(Guid.NewGuid(), userId, "Topic 1", DateTimeOffset.UtcNow.AddMinutes(-5), new ChatCompletionsConfig());
-        var topic2 = new Topic(Guid.NewGuid(), userId, "Topic 2", DateTimeOffset.UtcNow.AddMinutes(-2), new ChatCompletionsConfig());
-        var topic3 = new Topic(Guid.NewGuid(), userId, "Topic 3", DateTimeOffset.UtcNow.AddMinutes(-10), new ChatCompletionsConfig());
+        var topic1 = new Topic(Guid.NewGuid(), userId, "Topic 1", DateTimeOffset.UtcNow.AddMinutes(-5), new ChatGPTConfig());
+        var topic2 = new Topic(Guid.NewGuid(), userId, "Topic 2", DateTimeOffset.UtcNow.AddMinutes(-2), new ChatGPTConfig());
+        var topic3 = new Topic(Guid.NewGuid(), userId, "Topic 3", DateTimeOffset.UtcNow.AddMinutes(-10), new ChatGPTConfig());
 
         await _topicStorage.AddTopic(topic1, CancellationToken.None);
         await _topicStorage.AddTopic(topic2, CancellationToken.None);