Skip to content

Commit

Permalink
Fix Chat.IsWriting issue; Release 2.2.1
Browse files Browse the repository at this point in the history
  • Loading branch information
rodion-m committed Apr 19, 2023
1 parent 664dfd6 commit 8bbeea6
Show file tree
Hide file tree
Showing 5 changed files with 111 additions and 33 deletions.
2 changes: 1 addition & 1 deletion OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
<PackageId>OpenAI.ChatGPT.AspNetCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with DI</Product>
<Version>2.2.0</Version>
<Version>2.2.1</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with easy DI support (Microsoft.Extensions.DependencyInjection). It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
<PackageId>OpenAI.ChatGPT.EntityFrameworkCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with EF Core storage</Product>
<Version>2.2.0</Version>
<Version>2.2.1</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core support. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
Expand Down
58 changes: 57 additions & 1 deletion OpenAI.ChatGpt/AsyncEnumerableExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ internal static async IAsyncEnumerable<T> ConfigureExceptions<T>(
this IAsyncEnumerable<T> stream,
bool throwOnCancellation) where T: class
{
if (stream == null) throw new ArgumentNullException(nameof(stream));
ArgumentNullException.ThrowIfNull(stream);
var enumerator = stream.GetAsyncEnumerator();
T? result = null;
var hasResult = true;
Expand All @@ -34,4 +34,60 @@ internal static async IAsyncEnumerable<T> ConfigureExceptions<T>(

await enumerator.DisposeAsync();
}

internal static async IAsyncEnumerable<T> ConfigureExceptions<T>(
this IAsyncEnumerable<T> stream,
bool throwOnCancellation,
Action<Exception>? onExceptionBeforeThrowing) where T: class
{
ArgumentNullException.ThrowIfNull(stream);
IAsyncEnumerator<T> enumerator;
try
{
enumerator = stream.GetAsyncEnumerator();
}
catch (Exception e)
{
onExceptionBeforeThrowing?.Invoke(e);
throw;
}
T? result = null;
var hasResult = true;
while (hasResult)
{
try
{
hasResult = await enumerator.MoveNextAsync();
result = hasResult ? enumerator.Current : null;
}
catch (OperationCanceledException e)
{
await DisposeAsyncSafe();
onExceptionBeforeThrowing?.Invoke(e);
if (throwOnCancellation)
{
throw;
}
}
if (result != null)
{
yield return result;
}
}

await DisposeAsyncSafe();

async Task DisposeAsyncSafe()
{
try
{
await enumerator.DisposeAsync();
}
catch (Exception e)
{
onExceptionBeforeThrowing?.Invoke(e);
throw;
}
}
}
}
80 changes: 51 additions & 29 deletions OpenAI.ChatGpt/Chat.cs
Original file line number Diff line number Diff line change
Expand Up @@ -82,29 +82,39 @@ private async Task<string> GetNextMessageResponse(
UserOrSystemMessage message,
CancellationToken cancellationToken)
{
if (IsWriting)
{
throw new InvalidOperationException("Cannot start a new chat session while the previous one is still in progress.");
}
var originalCancellation = cancellationToken;
_cts?.Dispose();
_cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_cts.Token.Register(() => IsWriting = false);
_cts = CancellationTokenSource.CreateLinkedTokenSource(originalCancellation);
cancellationToken = _cts.Token;

var history = await LoadHistory(cancellationToken);
var messages = history.Append(message);

IsWriting = true; //TODO set false on exception
var response = await _client.GetChatCompletionsRaw(
messages,
user: Topic.Config.PassUserIdToOpenAiRequests is true ? UserId : null,
requestModifier: Topic.Config.ModifyRequest,
cancellationToken: _cts.Token
);
SetLastResponse(response);

var assistantMessage = response.GetMessageContent();
await _chatHistoryStorage.SaveMessages(
UserId, TopicId, message, assistantMessage, _clock.GetCurrentTime(), _cts.Token);
IsWriting = false;
_isNew = false;
IsWriting = true;
try
{
var response = await _client.GetChatCompletionsRaw(
messages,
user: Topic.Config.PassUserIdToOpenAiRequests is true ? UserId : null,
requestModifier: Topic.Config.ModifyRequest,
cancellationToken: cancellationToken
);
SetLastResponse(response);

return assistantMessage;
var assistantMessage = response.GetMessageContent();
await _chatHistoryStorage.SaveMessages(
UserId, TopicId, message, assistantMessage, _clock.GetCurrentTime(), cancellationToken);
_isNew = false;
return assistantMessage;
}
finally
{
IsWriting = false;
}
}

public IAsyncEnumerable<string> StreamNextMessageResponse(
Expand All @@ -122,45 +132,57 @@ private async IAsyncEnumerable<string> StreamNextMessageResponse(
bool throwOnCancellation,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
if (IsWriting)
{
throw new InvalidOperationException("Cannot start a new chat session while the previous one is still in progress.");
}
var originalCancellationToken = cancellationToken;
_cts?.Dispose();
_cts = CancellationTokenSource.CreateLinkedTokenSource(originalCancellationToken);
cancellationToken = _cts.Token;
cancellationToken.Register(() => IsWriting = false);

var history = await LoadHistory(cancellationToken);
var messages = history.Append(message);
var sb = new StringBuilder();
IsWriting = true; //TODO set false on exception
IsWriting = true;
var stream = _client.StreamChatCompletions(
messages,
user: Topic.Config.PassUserIdToOpenAiRequests is true ? UserId : null,
requestModifier: Topic.Config.ModifyRequest,
cancellationToken: cancellationToken
);
await foreach (var chunk in stream
.ConfigureExceptions(throwOnCancellation)
.WithCancellation(cancellationToken))
).ConfigureExceptions(throwOnCancellation, _ => IsWriting = false);
await foreach (var chunk in stream.WithCancellation(cancellationToken))
{
sb.Append(chunk);
yield return chunk;
}

if(cancellationToken.IsCancellationRequested && !throwOnCancellation)
{
IsWriting = false;
yield break;
}

SetLastResponse(null);

await _chatHistoryStorage.SaveMessages(
UserId, TopicId, message, sb.ToString(), _clock.GetCurrentTime(), cancellationToken);
IsWriting = false;
_isNew = false;
try
{
await _chatHistoryStorage.SaveMessages(
UserId,
TopicId,
message,
sb.ToString(),
_clock.GetCurrentTime(),
cancellationToken);
_isNew = false;
}
finally
{
IsWriting = false;
}
}

private async Task<IEnumerable<ChatCompletionMessage>> LoadHistory(CancellationToken cancellationToken)
private async Task<IEnumerable<ChatCompletionMessage>> LoadHistory(
CancellationToken cancellationToken)
{
if (_isNew) return Enumerable.Empty<ChatCompletionMessage>();
return await _chatHistoryStorage.GetMessages(UserId, TopicId, cancellationToken);
Expand Down
2 changes: 1 addition & 1 deletion OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
<PackageId>OpenAI.ChatGPT</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET</Product>
<Version>2.2.0</Version>
<Version>2.2.1</Version>
<Description>.NET integration for ChatGPT with support for streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
Expand Down

0 comments on commit 8bbeea6

Please sign in to comment.