diff --git a/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs b/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs index cb378cc..b26022d 100644 --- a/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs +++ b/OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs @@ -14,6 +14,7 @@ namespace OpenAI.ChatGpt.AspNetCore; /// .AddPolicyHandler(GetRetryPolicy()) /// .AddPolicyHandler(GetCircuitBreakerPolicy()); /// +[Fody.ConfigureAwait(false)] // ReSharper disable once InconsistentNaming public class ChatGPTFactory : IDisposable { @@ -22,6 +23,7 @@ public class ChatGPTFactory : IDisposable private readonly IChatHistoryStorage _chatHistoryStorage; private readonly ITimeProvider _clock; private bool _ensureStorageCreatedCalled; + private readonly bool _isHttpClientInjected; public ChatGPTFactory( IHttpClientFactory httpClientFactory, @@ -36,9 +38,10 @@ public ChatGPTFactory( _chatHistoryStorage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage)); _clock = clock ?? throw new ArgumentNullException(nameof(clock)); _client = CreateOpenAiClient(httpClientFactory, credentials); + _isHttpClientInjected = true; } - public ChatGPTFactory( + internal ChatGPTFactory( IOptions credentials, IOptions config, IChatHistoryStorage chatHistoryStorage, @@ -120,6 +123,9 @@ public async Task Create( public void Dispose() { - _client.Dispose(); + if (!_isHttpClientInjected) + { + _client.Dispose(); + } } } \ No newline at end of file diff --git a/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj b/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj index 6fbc3ff..5a55d85 100644 --- a/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj +++ b/OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj @@ -8,7 +8,7 @@ OpenAI.ChatGPT.AspNetCore https://github.com/rodion-m/ChatGPT_API_dotnet OpenAI ChatGPT integration for .NET with DI - 2.0.2 + 2.0.3 OpenAI Chat Completions API (ChatGPT) integration with easy DI supporting (Microsoft.Extensions.DependencyInjection). 
It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams. https://github.com/rodion-m/ChatGPT_API_dotnet net6.0;net7.0 @@ -23,6 +23,11 @@ + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/OpenAI.ChatGpt.EntityFrameworkCore/CachedChatHistoryStorageDecorator.cs b/OpenAI.ChatGpt.EntityFrameworkCore/CachedChatHistoryStorageDecorator.cs index fdb6fdb..70eba0e 100644 --- a/OpenAI.ChatGpt.EntityFrameworkCore/CachedChatHistoryStorageDecorator.cs +++ b/OpenAI.ChatGpt.EntityFrameworkCore/CachedChatHistoryStorageDecorator.cs @@ -3,6 +3,7 @@ namespace OpenAI.ChatGpt.EntityFrameworkCore; +[Fody.ConfigureAwait(false)] public class CachedChatHistoryStorageDecorator : IChatHistoryStorage { private readonly IChatHistoryStorage _chatHistoryStorage; @@ -21,9 +22,9 @@ public CachedChatHistoryStorageDecorator( _cacheConfig = cacheConfig.Value; } - private string GetUserTopicsKey(string userId) => $"chatbot_topics_{userId}"; - private string GetMessagesKey(Guid topicId) => $"chatbot_messages_{topicId}"; - private string GetTopicKey(Guid topicId) => $"chatbot_topic_{topicId}"; + private static string GetUserTopicsKey(string userId) => $"chatbot_topics_{userId}"; + private static string GetMessagesKey(Guid topicId) => $"chatbot_messages_{topicId}"; + private static string GetTopicKey(Guid topicId) => $"chatbot_topic_{topicId}"; /// public Task> GetTopics(string userId, CancellationToken cancellationToken) diff --git a/OpenAI.ChatGpt.EntityFrameworkCore/ChatHistoryCacheConfig.cs b/OpenAI.ChatGpt.EntityFrameworkCore/ChatHistoryCacheConfig.cs index 67ab6bc..2c287ab 100644 --- a/OpenAI.ChatGpt.EntityFrameworkCore/ChatHistoryCacheConfig.cs +++ b/OpenAI.ChatGpt.EntityFrameworkCore/ChatHistoryCacheConfig.cs @@ -2,6 +2,18 @@ namespace OpenAI.ChatGpt.EntityFrameworkCore; public class ChatHistoryCacheConfig { - public TimeSpan? 
MessagesSlidingExpiration { get; set; } = TimeSpan.FromMinutes(10); - public TimeSpan? TopicsSlidingExpiration { get; set; } = TimeSpan.FromMinutes(10); + private TimeSpan? _messagesSlidingExpiration; + private TimeSpan? _topicsSlidingExpiration; + + public TimeSpan MessagesSlidingExpiration + { + get => _messagesSlidingExpiration ?? TimeSpan.FromMinutes(10); + set => _messagesSlidingExpiration = value; + } + + public TimeSpan TopicsSlidingExpiration + { + get => _topicsSlidingExpiration ?? TimeSpan.FromMinutes(10); + set => _topicsSlidingExpiration = value; + } } \ No newline at end of file diff --git a/OpenAI.ChatGpt.EntityFrameworkCore/EfChatHistoryStorage.cs b/OpenAI.ChatGpt.EntityFrameworkCore/EfChatHistoryStorage.cs index e8e8f88..54a8700 100644 --- a/OpenAI.ChatGpt.EntityFrameworkCore/EfChatHistoryStorage.cs +++ b/OpenAI.ChatGpt.EntityFrameworkCore/EfChatHistoryStorage.cs @@ -2,6 +2,7 @@ namespace OpenAI.ChatGpt.EntityFrameworkCore; +[Fody.ConfigureAwait(false)] public class EfChatHistoryStorage : IChatHistoryStorage { private readonly ChatGptDbContext _dbContext; @@ -12,15 +13,16 @@ public EfChatHistoryStorage(ChatGptDbContext dbContext) } /// - public async Task> GetTopics(string userId, - CancellationToken cancellationToken) + public async Task> GetTopics( + string userId, CancellationToken cancellationToken) { if (userId == null) throw new ArgumentNullException(nameof(userId)); return await _dbContext.Topics.ToListAsync(cancellationToken: cancellationToken); } /// - public async Task GetTopic(string userId, Guid topicId, CancellationToken cancellationToken) + public async Task GetTopic( + string userId, Guid topicId, CancellationToken cancellationToken) { if (userId == null) throw new ArgumentNullException(nameof(userId)); var topic = await _dbContext.Topics.FirstOrDefaultAsync( diff --git a/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj b/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj index 
443fdd7..4f7fd19 100644 --- a/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj +++ b/OpenAI.ChatGpt.EntityFrameworkCore/OpenAI.ChatGpt.EntityFrameworkCore.csproj @@ -9,7 +9,7 @@ OpenAI.ChatGPT.EntityFrameworkCore https://github.com/rodion-m/ChatGPT_API_dotnet OpenAI ChatGPT integration for .NET with EF Core storage - 2.0.2 + 2.0.3 OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core supporting. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams. https://github.com/rodion-m/ChatGPT_API_dotnet net6.0;net7.0 @@ -20,11 +20,17 @@ + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + all runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/OpenAI.ChatGpt/AsyncEnumerableExtensions.cs b/OpenAI.ChatGpt/AsyncEnumerableExtensions.cs index 6ba95c2..6005443 100644 --- a/OpenAI.ChatGpt/AsyncEnumerableExtensions.cs +++ b/OpenAI.ChatGpt/AsyncEnumerableExtensions.cs @@ -1,9 +1,11 @@ namespace OpenAI.ChatGpt; +[Fody.ConfigureAwait(false)] public static class AsyncEnumerableExtensions { - public static async IAsyncEnumerable ThrowOnCancellation( - this IAsyncEnumerable stream, bool throwOnCancellation) where T: class + internal static async IAsyncEnumerable ConfigureExceptions( + this IAsyncEnumerable stream, + bool throwOnCancellation) where T: class { if (stream == null) throw new ArgumentNullException(nameof(stream)); var enumerator = stream.GetAsyncEnumerator(); @@ -13,14 +15,14 @@ public static async IAsyncEnumerable ThrowOnCancellation( { try { - hasResult = await enumerator.MoveNextAsync().ConfigureAwait(false); + hasResult = await enumerator.MoveNextAsync(); result = hasResult ? 
enumerator.Current : null; } catch (OperationCanceledException) { if (throwOnCancellation) { - await enumerator.DisposeAsync().ConfigureAwait(false); + await enumerator.DisposeAsync(); throw; } } @@ -30,6 +32,6 @@ public static async IAsyncEnumerable ThrowOnCancellation( } } - await enumerator.DisposeAsync().ConfigureAwait(false); + await enumerator.DisposeAsync(); } } \ No newline at end of file diff --git a/OpenAI.ChatGpt/Chat.cs b/OpenAI.ChatGpt/Chat.cs index eece899..2f04487 100644 --- a/OpenAI.ChatGpt/Chat.cs +++ b/OpenAI.ChatGpt/Chat.cs @@ -78,19 +78,23 @@ await _chatHistoryStorage.SaveMessages( public IAsyncEnumerable StreamNextMessageResponse( string message, + bool throwOnCancellation = true, CancellationToken cancellationToken = default) { if (message == null) throw new ArgumentNullException(nameof(message)); var chatCompletionMessage = new UserMessage(message); - return StreamNextMessageResponse(chatCompletionMessage, cancellationToken); + return StreamNextMessageResponse(chatCompletionMessage, throwOnCancellation, cancellationToken); } private async IAsyncEnumerable StreamNextMessageResponse( UserOrSystemMessage message, + bool throwOnCancellation, [EnumeratorCancellation] CancellationToken cancellationToken) { - _cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); - _cts.Token.Register(() => IsWriting = false); + var originalCancellationToken = cancellationToken; + _cts = CancellationTokenSource.CreateLinkedTokenSource(originalCancellationToken); + cancellationToken = _cts.Token; + cancellationToken.Register(() => IsWriting = false); var history = await LoadHistory(cancellationToken); var messages = history.Append(message); @@ -100,16 +104,21 @@ private async IAsyncEnumerable StreamNextMessageResponse( messages, user: Topic.Config.PassUserIdToOpenAiRequests is true ? 
UserId : null, requestModifier: Topic.Config.ModifyRequest, - cancellationToken: _cts.Token + cancellationToken: cancellationToken ); - await foreach (var chunk in stream.WithCancellation(cancellationToken)) + await foreach (var chunk in stream + .ConfigureExceptions(throwOnCancellation) + .WithCancellation(cancellationToken)) { sb.Append(chunk); yield return chunk; } + + if(cancellationToken.IsCancellationRequested && !throwOnCancellation) + yield break; await _chatHistoryStorage.SaveMessages( - UserId, ChatId, message, sb.ToString(), _clock.GetCurrentTime(), _cts.Token); + UserId, ChatId, message, sb.ToString(), _clock.GetCurrentTime(), cancellationToken); IsWriting = false; _isNew = false; } diff --git a/OpenAI.ChatGpt/ChatGPT.cs b/OpenAI.ChatGpt/ChatGPT.cs index 3dcd219..dec9e75 100644 --- a/OpenAI.ChatGpt/ChatGPT.cs +++ b/OpenAI.ChatGpt/ChatGPT.cs @@ -5,7 +5,8 @@ namespace OpenAI.ChatGpt; -/// Chat conversations provider +/// Chat conversations provider. +[Fody.ConfigureAwait(false)] // ReSharper disable once InconsistentNaming public class ChatGPT : IDisposable { diff --git a/OpenAI.ChatGpt/HttpClientExtensions.cs b/OpenAI.ChatGpt/HttpClientExtensions.cs index 9b24a96..bb8d515 100644 --- a/OpenAI.ChatGpt/HttpClientExtensions.cs +++ b/OpenAI.ChatGpt/HttpClientExtensions.cs @@ -6,6 +6,7 @@ namespace OpenAI.ChatGpt; +[Fody.ConfigureAwait(false)] internal static class HttpClientExtensions { private static readonly int DataHeaderLength = "data: ".Length; @@ -32,19 +33,17 @@ internal static async IAsyncEnumerable Content = JsonContent.Create(request, options: serializerOptions) }; requestMessage.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream")); - using var response = await SendAsync().ConfigureAwait(false); + using var response = await SendAsync(); if (!response.IsSuccessStatusCode) { - var responseContent = await response.Content.ReadAsStringAsync(cancellationToken) - .ConfigureAwait(false); + var responseContent = await 
response.Content.ReadAsStringAsync(cancellationToken); throw new ServerSentEventsResponseException(response.StatusCode, responseContent); } - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken) - .ConfigureAwait(false); + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken); var reader = new StreamReader(stream); - while (await ReadLineAsync(reader, cancellationToken).ConfigureAwait(false) is { } line) + while (await ReadLineAsync(reader, cancellationToken) is { } line) { cancellationToken.ThrowIfCancellationRequested(); var (result, data) = ProcessResponseEvent(line); @@ -99,6 +98,7 @@ Task SendAsync() TextReader reader, CancellationToken cancellationToken) { + ArgumentNullException.ThrowIfNull(reader); #if NET7_0_OR_GREATER return reader.ReadLineAsync(cancellationToken); #else diff --git a/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj b/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj index dd040ee..c430b1e 100644 --- a/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj +++ b/OpenAI.ChatGpt/OpenAI.ChatGpt.csproj @@ -10,7 +10,7 @@ OpenAI.ChatGPT https://github.com/rodion-m/ChatGPT_API_dotnet OpenAI ChatGPT integration for .NET - 2.0.2 + 2.0.3 .NET integration for ChatGPT with streaming responses supporting (like ChatGPT) via async streams. https://github.com/rodion-m/ChatGPT_API_dotnet MIT @@ -24,5 +24,13 @@ + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + diff --git a/OpenAI.ChatGpt/OpenAIClient.cs b/OpenAI.ChatGpt/OpenAIClient.cs index 1bfc07d..61ca885 100644 --- a/OpenAI.ChatGpt/OpenAIClient.cs +++ b/OpenAI.ChatGpt/OpenAIClient.cs @@ -11,6 +11,7 @@ namespace OpenAI.ChatGpt; /// Thread-safe OpenAI client. 
+[Fody.ConfigureAwait(false)] public class OpenAiClient : IDisposable { private const string DefaultHost = "https://api.openai.com/v1/"; @@ -140,10 +141,9 @@ internal async Task GetChatCompletions( request, cancellationToken: cancellationToken, options: _nullIgnoreSerializerOptions - ).ConfigureAwait(false); + ); var responseContent = await response.Content - .ReadAsStringAsync(cancellationToken) - .ConfigureAwait(false); + .ReadAsStringAsync(cancellationToken); if (!response.IsSuccessStatusCode) { @@ -290,9 +290,8 @@ internal async Task GenerateImageBytes( ImagesEndpoint, request, cancellationToken: cancellationToken - ).ConfigureAwait(false); - var responseContent = await response.Content.ReadAsStringAsync(cancellationToken) - .ConfigureAwait(false); + ); + var responseContent = await response.Content.ReadAsStringAsync(cancellationToken); if (!response.IsSuccessStatusCode) { @@ -321,9 +320,8 @@ internal async Task GenerateImagesUris( request, options: _nullIgnoreSerializerOptions, cancellationToken: cancellationToken - ).ConfigureAwait(false); - var responseContent = await response.Content.ReadAsStringAsync(cancellationToken) - .ConfigureAwait(false); + ); + var responseContent = await response.Content.ReadAsStringAsync(cancellationToken); if (!response.IsSuccessStatusCode) { diff --git a/samples/ChatGpt.SpectreConsoleExample/Program.cs b/samples/ChatGpt.SpectreConsoleExample/Program.cs index 595fccf..60d3931 100644 --- a/samples/ChatGpt.SpectreConsoleExample/Program.cs +++ b/samples/ChatGpt.SpectreConsoleExample/Program.cs @@ -21,8 +21,7 @@ while (Console.Ask($"[underline green]{name}[/]: ") is { } userMessage) { Console.Markup("[underline red]ChatGPT[/]: "); - var stream = chat.StreamNextMessageResponse(userMessage) - .ThrowOnCancellation(false); + var stream = chat.StreamNextMessageResponse(userMessage, throwOnCancellation: false); await foreach (string chunk in stream.SkipWhile(string.IsNullOrWhiteSpace)) { if (!chat.IsCancelled) Console.Write(chunk); diff 
--git a/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs b/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs new file mode 100644 index 0000000..1025bbe --- /dev/null +++ b/tests/OpenAI.ChatGpt.IntegrationTests/ChatGptTests.cs @@ -0,0 +1,45 @@ +namespace OpenAI.ChatGpt.IntegrationTests; + +public class ChatGptTests +{ + [Fact] + public async Task Stream_chatgpt_response_cancellation_throws_exception() + { + Chat chat = await CreateInMemoryChat(); + const string text = "Write numbers from 1 to 50"; + await FluentActions.Invoking( + async () => + { + await foreach (var _ in chat.StreamNextMessageResponse(text)) + { + chat.Stop(); + } + }) + .Should().ThrowAsync(); + } + + [Fact] + public async Task Stream_chatgpt_response_cancellation_with_throwOnCancellation_false_stopped_silently() + { + Chat chat = await CreateInMemoryChat(); + const string text = "Write numbers from 1 to 50"; + await FluentActions.Invoking( + async () => + { + await foreach (var _ in chat.StreamNextMessageResponse(text, throwOnCancellation: false)) + { + chat.Stop(); + } + }) + .Should().NotThrowAsync(); + } + + private static async Task CreateInMemoryChat() + { + return await ChatGPT.CreateInMemoryChat(Helpers.GetKeyFromEnvironment("OPENAI_API_KEY"), + new ChatCompletionsConfig() + { + MaxTokens = 100 + }); + } +} \ No newline at end of file diff --git a/tests/OpenAI.ChatGpt.IntegrationTests/OpenAI.ChatGpt.IntegrationTests.csproj b/tests/OpenAI.ChatGpt.IntegrationTests/OpenAI.ChatGpt.IntegrationTests.csproj index 3c1fd08..b7e14b8 100644 --- a/tests/OpenAI.ChatGpt.IntegrationTests/OpenAI.ChatGpt.IntegrationTests.csproj +++ b/tests/OpenAI.ChatGpt.IntegrationTests/OpenAI.ChatGpt.IntegrationTests.csproj @@ -14,6 +14,7 @@ + runtime; build; native; contentfiles; analyzers; buildtransitive