Skip to content

Commit

Permalink
Replace ConfigureAwait(false) with Fody.ConfigureAwait; Add throwOnCanc…
Browse files Browse the repository at this point in the history
…ellation parameter to StreamNextMessageResponse method
  • Loading branch information
rodion-m committed Apr 18, 2023
1 parent 4df91ea commit 307af40
Show file tree
Hide file tree
Showing 15 changed files with 137 additions and 42 deletions.
10 changes: 8 additions & 2 deletions OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ namespace OpenAI.ChatGpt.AspNetCore;
/// .AddPolicyHandler(GetRetryPolicy())
/// .AddPolicyHandler(GetCircuitBreakerPolicy());
/// </example>
[Fody.ConfigureAwait(false)]
// ReSharper disable once InconsistentNaming
public class ChatGPTFactory : IDisposable
{
Expand All @@ -22,6 +23,7 @@ public class ChatGPTFactory : IDisposable
private readonly IChatHistoryStorage _chatHistoryStorage;
private readonly ITimeProvider _clock;
private bool _ensureStorageCreatedCalled;
private readonly bool _isHttpClientInjected;

public ChatGPTFactory(
IHttpClientFactory httpClientFactory,
Expand All @@ -36,9 +38,10 @@ public ChatGPTFactory(
_chatHistoryStorage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
_clock = clock ?? throw new ArgumentNullException(nameof(clock));
_client = CreateOpenAiClient(httpClientFactory, credentials);
_isHttpClientInjected = true;
}

public ChatGPTFactory(
internal ChatGPTFactory(
IOptions<ChatGptCredentials> credentials,
IOptions<ChatCompletionsConfig> config,
IChatHistoryStorage chatHistoryStorage,
Expand Down Expand Up @@ -120,6 +123,9 @@ public async Task<ChatGPT> Create(

/// <summary>
/// Disposes the underlying OpenAI client, but only when this factory
/// created the client itself. When the HttpClient was supplied through
/// IHttpClientFactory, its lifetime is owned by that factory and must
/// not be ended here.
/// </summary>
public void Dispose()
{
    if (!_isHttpClientInjected)
    {
        _client.Dispose();
    }
}
}
7 changes: 6 additions & 1 deletion OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
<PackageId>OpenAI.ChatGPT.AspNetCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with DI</Product>
<Version>2.0.2</Version>
<Version>2.0.3</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with easy DI supporting (Microsoft.Extensions.DependencyInjection). It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
Expand All @@ -23,6 +23,11 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2" />
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="7.0.1" />
<PackageReference Include="Microsoft.Extensions.Http" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="7.0.1" />
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

namespace OpenAI.ChatGpt.EntityFrameworkCore;

[Fody.ConfigureAwait(false)]
public class CachedChatHistoryStorageDecorator : IChatHistoryStorage
{
private readonly IChatHistoryStorage _chatHistoryStorage;
Expand All @@ -21,9 +22,9 @@ public CachedChatHistoryStorageDecorator(
_cacheConfig = cacheConfig.Value;
}

// Cache-key builders. Static because they read no instance state; the
// prefixes keep topic/message/user entries from colliding in the shared cache.
private static string GetUserTopicsKey(string userId) => $"chatbot_topics_{userId}";
private static string GetMessagesKey(Guid topicId) => $"chatbot_messages_{topicId}";
private static string GetTopicKey(Guid topicId) => $"chatbot_topic_{topicId}";

/// <inheritdoc/>
public Task<IEnumerable<Topic>> GetTopics(string userId, CancellationToken cancellationToken)
Expand Down
16 changes: 14 additions & 2 deletions OpenAI.ChatGpt.EntityFrameworkCore/ChatHistoryCacheConfig.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,18 @@ namespace OpenAI.ChatGpt.EntityFrameworkCore;

/// <summary>
/// Cache expiration settings for the chat history storage decorator.
/// Both sliding expirations default to 10 minutes until explicitly set.
/// </summary>
public class ChatHistoryCacheConfig
{
    // Backing fields stay null until a value is assigned so the getters
    // can fall back to the 10-minute default without exposing a nullable
    // type to consumers.
    private TimeSpan? _messagesSlidingExpiration;
    private TimeSpan? _topicsSlidingExpiration;

    /// <summary>
    /// Sliding expiration for cached chat messages. Defaults to 10 minutes.
    /// </summary>
    public TimeSpan MessagesSlidingExpiration
    {
        get => _messagesSlidingExpiration ?? TimeSpan.FromMinutes(10);
        set => _messagesSlidingExpiration = value;
    }

    /// <summary>
    /// Sliding expiration for cached topics. Defaults to 10 minutes.
    /// </summary>
    public TimeSpan TopicsSlidingExpiration
    {
        get => _topicsSlidingExpiration ?? TimeSpan.FromMinutes(10);
        set => _topicsSlidingExpiration = value;
    }
}
8 changes: 5 additions & 3 deletions OpenAI.ChatGpt.EntityFrameworkCore/EfChatHistoryStorage.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

namespace OpenAI.ChatGpt.EntityFrameworkCore;

[Fody.ConfigureAwait(false)]
public class EfChatHistoryStorage : IChatHistoryStorage
{
private readonly ChatGptDbContext _dbContext;
Expand All @@ -12,15 +13,16 @@ public EfChatHistoryStorage(ChatGptDbContext dbContext)
}

/// <inheritdoc />
public async Task<IEnumerable<Topic>> GetTopics(
    string userId, CancellationToken cancellationToken)
{
    if (userId == null) throw new ArgumentNullException(nameof(userId));
    // NOTE(review): userId is validated but never used in the query — every
    // topic in the table is returned regardless of owner. Confirm whether
    // this should filter, e.g. .Where(t => t.UserId == userId).
    return await _dbContext.Topics.ToListAsync(cancellationToken: cancellationToken);
}

/// <inheritdoc />
public async Task<Topic> GetTopic(string userId, Guid topicId, CancellationToken cancellationToken)
public async Task<Topic> GetTopic(
string userId, Guid topicId, CancellationToken cancellationToken)
{
if (userId == null) throw new ArgumentNullException(nameof(userId));
var topic = await _dbContext.Topics.FirstOrDefaultAsync(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
<PackageId>OpenAI.ChatGPT.EntityFrameworkCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with EF Core storage</Product>
<Version>2.0.2</Version>
<Version>2.0.3</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core supporting. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
Expand All @@ -20,11 +20,17 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2" />
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="7.0.4" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="7.0.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="System.Linq.Async" Version="6.0.1" />
</ItemGroup>

<ItemGroup>
Expand Down
12 changes: 7 additions & 5 deletions OpenAI.ChatGpt/AsyncEnumerableExtensions.cs
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
namespace OpenAI.ChatGpt;

[Fody.ConfigureAwait(false)]
public static class AsyncEnumerableExtensions
{
public static async IAsyncEnumerable<T> ThrowOnCancellation<T>(
this IAsyncEnumerable<T> stream, bool throwOnCancellation) where T: class
internal static async IAsyncEnumerable<T> ConfigureExceptions<T>(
this IAsyncEnumerable<T> stream,
bool throwOnCancellation) where T: class
{
if (stream == null) throw new ArgumentNullException(nameof(stream));
var enumerator = stream.GetAsyncEnumerator();
Expand All @@ -13,14 +15,14 @@ public static async IAsyncEnumerable<T> ThrowOnCancellation<T>(
{
try
{
hasResult = await enumerator.MoveNextAsync().ConfigureAwait(false);
hasResult = await enumerator.MoveNextAsync();
result = hasResult ? enumerator.Current : null;
}
catch (OperationCanceledException)
{
if (throwOnCancellation)
{
await enumerator.DisposeAsync().ConfigureAwait(false);
await enumerator.DisposeAsync();
throw;
}
}
Expand All @@ -30,6 +32,6 @@ public static async IAsyncEnumerable<T> ThrowOnCancellation<T>(
}
}

await enumerator.DisposeAsync().ConfigureAwait(false);
await enumerator.DisposeAsync();
}
}
21 changes: 15 additions & 6 deletions OpenAI.ChatGpt/Chat.cs
Original file line number Diff line number Diff line change
Expand Up @@ -78,19 +78,23 @@ await _chatHistoryStorage.SaveMessages(

public IAsyncEnumerable<string> StreamNextMessageResponse(
string message,
bool throwOnCancellation = true,
CancellationToken cancellationToken = default)
{
if (message == null) throw new ArgumentNullException(nameof(message));
var chatCompletionMessage = new UserMessage(message);
return StreamNextMessageResponse(chatCompletionMessage, cancellationToken);
return StreamNextMessageResponse(chatCompletionMessage, throwOnCancellation, cancellationToken);
}

private async IAsyncEnumerable<string> StreamNextMessageResponse(
UserOrSystemMessage message,
bool throwOnCancellation,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
_cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_cts.Token.Register(() => IsWriting = false);
var originalCancellationToken = cancellationToken;
_cts = CancellationTokenSource.CreateLinkedTokenSource(originalCancellationToken);
cancellationToken = _cts.Token;
cancellationToken.Register(() => IsWriting = false);

var history = await LoadHistory(cancellationToken);
var messages = history.Append(message);
Expand All @@ -100,16 +104,21 @@ private async IAsyncEnumerable<string> StreamNextMessageResponse(
messages,
user: Topic.Config.PassUserIdToOpenAiRequests is true ? UserId : null,
requestModifier: Topic.Config.ModifyRequest,
cancellationToken: _cts.Token
cancellationToken: cancellationToken
);
await foreach (var chunk in stream.WithCancellation(cancellationToken))
await foreach (var chunk in stream
.ConfigureExceptions(throwOnCancellation)
.WithCancellation(cancellationToken))
{
sb.Append(chunk);
yield return chunk;
}

if(cancellationToken.IsCancellationRequested && !throwOnCancellation)
yield break;

await _chatHistoryStorage.SaveMessages(
UserId, ChatId, message, sb.ToString(), _clock.GetCurrentTime(), _cts.Token);
UserId, ChatId, message, sb.ToString(), _clock.GetCurrentTime(), cancellationToken);
IsWriting = false;
_isNew = false;
}
Expand Down
3 changes: 2 additions & 1 deletion OpenAI.ChatGpt/ChatGPT.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@

namespace OpenAI.ChatGpt;

/// <summary> Chat conversations provider </summary>
/// <summary> Chat conversations provider. </summary>
[Fody.ConfigureAwait(false)]
// ReSharper disable once InconsistentNaming
public class ChatGPT : IDisposable
{
Expand Down
12 changes: 6 additions & 6 deletions OpenAI.ChatGpt/HttpClientExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

namespace OpenAI.ChatGpt;

[Fody.ConfigureAwait(false)]
internal static class HttpClientExtensions
{
private static readonly int DataHeaderLength = "data: ".Length;
Expand All @@ -32,19 +33,17 @@ internal static async IAsyncEnumerable<TResponse>
Content = JsonContent.Create(request, options: serializerOptions)
};
requestMessage.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream"));
using var response = await SendAsync().ConfigureAwait(false);
using var response = await SendAsync();

if (!response.IsSuccessStatusCode)
{
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken)
.ConfigureAwait(false);
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken);
throw new ServerSentEventsResponseException(response.StatusCode, responseContent);
}

await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken)
.ConfigureAwait(false);
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
var reader = new StreamReader(stream);
while (await ReadLineAsync(reader, cancellationToken).ConfigureAwait(false) is { } line)
while (await ReadLineAsync(reader, cancellationToken) is { } line)
{
cancellationToken.ThrowIfCancellationRequested();
var (result, data) = ProcessResponseEvent(line);
Expand Down Expand Up @@ -99,6 +98,7 @@ Task<HttpResponseMessage> SendAsync()
TextReader reader,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(reader);
#if NET7_0_OR_GREATER
return reader.ReadLineAsync(cancellationToken);
#else
Expand Down
10 changes: 9 additions & 1 deletion OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
<PackageId>OpenAI.ChatGPT</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET</Product>
<Version>2.0.2</Version>
<Version>2.0.3</Version>
<Description>.NET integration for ChatGPT with streaming responses supporting (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
Expand All @@ -24,5 +24,13 @@
<InternalsVisibleTo Include="OpenAI.ChatGpt.UnitTests" />
<InternalsVisibleTo Include="OpenAI.ChatGpt.IntegrationTests" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2" />
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>

</Project>
16 changes: 7 additions & 9 deletions OpenAI.ChatGpt/OpenAIClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
namespace OpenAI.ChatGpt;

/// <summary> Thread-safe OpenAI client. </summary>
[Fody.ConfigureAwait(false)]
public class OpenAiClient : IDisposable
{
private const string DefaultHost = "https://api.openai.com/v1/";
Expand Down Expand Up @@ -140,10 +141,9 @@ internal async Task<ChatCompletionResponse> GetChatCompletions(
request,
cancellationToken: cancellationToken,
options: _nullIgnoreSerializerOptions
).ConfigureAwait(false);
);
var responseContent = await response.Content
.ReadAsStringAsync(cancellationToken)
.ConfigureAwait(false);
.ReadAsStringAsync(cancellationToken);

if (!response.IsSuccessStatusCode)
{
Expand Down Expand Up @@ -290,9 +290,8 @@ internal async Task<byte[]> GenerateImageBytes(
ImagesEndpoint,
request,
cancellationToken: cancellationToken
).ConfigureAwait(false);
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken)
.ConfigureAwait(false);
);
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken);

if (!response.IsSuccessStatusCode)
{
Expand Down Expand Up @@ -321,9 +320,8 @@ internal async Task<Uri[]> GenerateImagesUris(
request,
options: _nullIgnoreSerializerOptions,
cancellationToken: cancellationToken
).ConfigureAwait(false);
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken)
.ConfigureAwait(false);
);
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken);

if (!response.IsSuccessStatusCode)
{
Expand Down
3 changes: 1 addition & 2 deletions samples/ChatGpt.SpectreConsoleExample/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,7 @@
while (Console.Ask<string>($"[underline green]{name}[/]: ") is { } userMessage)
{
Console.Markup("[underline red]ChatGPT[/]: ");
var stream = chat.StreamNextMessageResponse(userMessage)
.ThrowOnCancellation(false);
var stream = chat.StreamNextMessageResponse(userMessage, throwOnCancellation: false);
await foreach (string chunk in stream.SkipWhile(string.IsNullOrWhiteSpace))
{
if (!chat.IsCancelled) Console.Write(chunk);
Expand Down
Loading

0 comments on commit 307af40

Please sign in to comment.