Skip to content

Commit

Permalink
Update readme to v2+; Release 2.4.1
Browse files Browse the repository at this point in the history
  • Loading branch information
rodion-m committed Apr 24, 2023
1 parent 67038e0 commit 6316eea
Show file tree
Hide file tree
Showing 7 changed files with 159 additions and 82 deletions.
34 changes: 18 additions & 16 deletions OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
<PackageId>OpenAI.ChatGPT.AspNetCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with DI</Product>
<Version>2.4.0</Version>
<Version>2.4.1</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with easy DI supporting (Microsoft.Extensions.DependencyInjection). It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
Expand All @@ -19,29 +19,31 @@
</PropertyGroup>

<ItemGroup>
<ProjectReference Include="..\OpenAI.ChatGpt\OpenAI.ChatGpt.csproj" />
<ProjectReference Include="..\OpenAI.ChatGpt\OpenAI.ChatGpt.csproj"/>
</ItemGroup>

<ItemGroup>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2" />
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="7.0.1" />
<PackageReference Include="Microsoft.Extensions.Http" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="7.0.1" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.DataAnnotations" Version="7.0.0" />
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2">
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="7.0.1"/>
<PackageReference Include="Microsoft.Extensions.Http" Version="7.0.0"/>
<PackageReference Include="Microsoft.Extensions.Options" Version="7.0.1"/>
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="7.0.0"/>
<PackageReference Include="Microsoft.Extensions.Options.DataAnnotations" Version="7.0.0"/>
</ItemGroup>

<ItemGroup>
<Folder Include="Exceptions\" />
<Folder Include="Exceptions\"/>
</ItemGroup>

<ItemGroup>
<InternalsVisibleTo Include="OpenAI.ChatGpt.UnitTests" />
<InternalsVisibleTo Include="OpenAI.ChatGpt.IntegrationTests" />
<InternalsVisibleTo Include="OpenAI.ChatGpt.UnitTests"/>
<InternalsVisibleTo Include="OpenAI.ChatGpt.IntegrationTests"/>
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
<PackageId>OpenAI.ChatGPT.EntityFrameworkCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with EF Core storage</Product>
<Version>2.4.0</Version>
<Version>2.4.1</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core supporting. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
Expand All @@ -20,21 +20,22 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2" />
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="7.0.5" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="7.0.5">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="System.Linq.Async" Version="6.0.1" />
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2">
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="7.0.5" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="7.0.5">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\OpenAI.ChatGpt.AspNetCore\OpenAI.ChatGpt.AspNetCore.csproj" />
<ProjectReference Include="..\OpenAI.ChatGpt.AspNetCore\OpenAI.ChatGpt.AspNetCore.csproj" />
</ItemGroup>

<ItemGroup>
Expand Down
24 changes: 13 additions & 11 deletions OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -10,27 +10,29 @@
<PackageId>OpenAI.ChatGPT</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET</Product>
<Version>2.4.0</Version>
<Version>2.4.1</Version>
<Description>.NET integration for ChatGPT with streaming responses supporting (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
<RootNamespace>OpenAI.ChatGpt</RootNamespace>
<Copyright>Rodion Mostovoi</Copyright>
<PackageTags>chatgpt, openai, sdk, api, chatcompletions, gpt3, gpt4</PackageTags>
</PropertyGroup>

<ItemGroup>
<InternalsVisibleTo Include="OpenAI.ChatGpt.UnitTests" />
<InternalsVisibleTo Include="OpenAI.ChatGpt.IntegrationTests" />
<InternalsVisibleTo Include="OpenAI.ChatGpt.UnitTests"/>
<InternalsVisibleTo Include="OpenAI.ChatGpt.IntegrationTests"/>
</ItemGroup>

<ItemGroup>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2" />
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="ConfigureAwait.Fody" Version="3.3.2">
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Fody" Version="6.6.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>

</Project>
151 changes: 111 additions & 40 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,67 +1,98 @@
# OpenAI Chat Completions (ChatGPT) and Images client for .NET
# ChatGPT integration for .NET
[![.NET](https://github.com/rodion-m/ChatGPT_API_dotnet/actions/workflows/dotnet.yml/badge.svg)](https://github.com/rodion-m/ChatGPT_API_dotnet/actions/workflows/dotnet.yml) \
This is a .NET client for the OpenAI Chat Completions API (ChatGPT). It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams. \
[![](assets/chatgpt_console_spectre_example.gif)](blob/master/samples/ChatGpt.SpectreConsoleExample/Program.cs)
OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core support. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams. \
***Looking for a ready solution to integrate ChatGPT into your application? It'll be available in a few days (a prototype is available [here](https://github.com/rodion-m/ChatGPT_API_dotnet/blob/master/OpenAI.ChatGpt/ChatGPT.cs)). At the moment you can just use the client directly.***

## Preparation
First, you need to create an OpenAI account and get an API key. You can do this at https://platform.openai.com/account/api-keys.

## Installation
You can install the package via NuGet:
The easiest way to use ChatGPT service in your .NET project with DI and persistence (EF Core) supporting is to install the NuGet package [OpenAI.ChatGPT.EntityFrameworkCore](https://www.nuget.org/packages/OpenAI.ChatGPT.EntityFrameworkCore/):
```
Install-Package OpenAI.ChatGPT
```
Then create an instance of `OpenAIClient`:
```csharp
_client = new OpenAiClient("{YOUR_OPENAI_API_KEY}");
Install-Package OpenAI.ChatGPT.EntityFrameworkCore
```
If you don't want to use EF Core, you can install the package [OpenAI.ChatGPT.AspNetCore](https://www.nuget.org/packages/OpenAI.ChatGPT.AspNetCore/) and implement your own storage for chat history, using the `IChatHistoryStorage` interface. \

## Simple usage of the Chat Completions API
## Usage
1. Set OpenAI API key or even host (optional) in your project user secrets, or appsettings.json (not safe):
```json
{
"OpenAICredentials": {
"ApiKey": "your-api-key-from-openai",
"ApiHost": "https://api.openai.com/v1/"
}
}
```
Also, you can specify OpenAI API key as environment variable `ASPNETCORE_OpenAICredentials:ApiKey`.
2. Add ChatGPT integration with EF to your DI container:
```csharp
string text = "Who are you?";
string response = await _client.GetChatCompletions(new UserMessage(text), maxTokens: 80);
Console.WriteLine(response);
builder.Services.AddChatGptEntityFrameworkIntegration(
options => options.UseSqlite("Data Source=chats.db"));
```
Instead of `options.UseSqlite("Data Source=chats.db")` use your own db and connection string.

## Streaming response with async streams (like ChatGPT)
3. Inject `ChatGPTFactory` to your service and use it to create `ChatGPT` instance:
```csharp
var text = "Write the world top 3 songs of Soul genre";
await foreach (string chunk in _client.StreamChatCompletions(new UserMessage(text), maxTokens: 80))
public class YourService
{
Console.Write(chunk);
private readonly ChatGPTFactory _chatGptFactory;

public YourService(ChatGPTFactory chatGptFactory)
{
_chatGptFactory = chatGptFactory;
}

public async Task<string> GetAnswer(string text)
{
ChatGPT chatGpt = await _chatGptFactory.Create(userId);
var chatService = await chatGpt.ContinueOrStartNewTopic();
response = await _chatService.GetNextMessageResponse(_prompt);
return response;
}
}
```

## Continue dialog with ChatGPT (message history)
Use `ThenAssistant` and `ThenUser` methods to create a dialog:
```csharp
var dialog = Dialog.StartAsUser("How many meters are in a kilometer? Write just the number.") //the message from user
.ThenAssistant("1000") // response from the assistant
.ThenUser("Convert it to hex. Write just the number."); // the next message from user
await foreach (var chunk in _client.StreamChatCompletions(dialog, maxTokens: 80))
See [Blazor Example](blob/master/samples/ChatGpt.BlazorExample/Pages/Index.razor).
If you want to configure request parameters, you can do it in `appsettings.json` configuration or in `ChatGPTFactory.Create` or in `ChatGPT.CreateTopic` methods.
```json
{
Console.Write(chunk);
"ChatGPTConfig": {
"InitialSystemMessage": null,
"InitialUserMessage": null,
"MaxTokens": null,
"Model": null,
"Temperature": null,
"PassUserIdToOpenAiRequests": true
}
}
```
Or just send message history as a collection.
See parameters description inside [ChatGPTConfig](blob/master/OpenAI.ChatGpt/Models/ChatGPTConfig.cs).

## OpenAI Images API (text-to-image) [aka DALLE-2]
### Generate image bytes
```csharp
byte[] image = await _client.GenerateImageBytes("bicycle", "test", OpenAiImageSize._256);
```
### Generate images uris
## Exceptions
If the server response is not a success status code, the client will throw a [NotExpectedResponseException](OpenAI.ChatGpt/Exceptions/NotExpectedResponseException.cs). The exception will contain the error message from the OpenAI API. \
By default, requesting cancellation or `ChatService.Stop()` method calling will throw `OperationCanceledException`. If you don't want to throw it (relevant for streaming responses), you can set `throwOnCancellation` parameter to `false`:
```csharp
Uri[] uris = await _client.GenerateImagesUris("bicycle", "test", OpenAiImageSize._256, count: 2);
await foreach (string chunk in chatService.StreamNextMessageResponse(text, throwOnCancellation: false))
{
//...
}
```
More examples see in the tests.

## Exceptions
If something went wrong, the client will throw `NotExpectedResponseException`. The exception will contain the error message from the OpenAI API.
## Thread safety and async
The thread safety of `ChatGPTFactory`, `ChatGPT` and `ChatService` depends on the `IChatHistoryStorage` implementation. If you use `ChatGPTFactory` with Entity Framework, it's NOT thread-safe. \
Anyway, these services are designed to be used safely with DI, so you don't need to worry about it. \
All APIs from all the packages are designed to be used in an async context and use `ConfigureAwait(false)` (thanks to the `ConfigureAwait.Fody` package).

## Retries, timeouts and other policies
Since `ChatGPTFactory` depends on `IHttpClientFactory`, you can easily use any of the available resilience policies with it, such as Polly. \

## Examples
* [Blazor Example](blob/master/samples/ChatGpt.BlazorExample)
* [Console Example](blob/master/samples/ChatGpt.ConsoleExample/Program.cs)
* [Spectre Console Example](blob/master/samples/ChatGpt.SpectreConsoleExample/Program.cs)

## API Parameters
Here is a list of all parameters that can be used in the ChatCompletions (ChatGPT) API request (https://github.com/rodion-m/ChatGPT_API_dotnet/blob/master/OpenAI/Models/ChatCompletion/ChatCompletionRequest.cs).
Here is a list of the main parameters that can be used in the ChatCompletions (ChatGPT) API request (blob/master/OpenAI.ChatGpt/Models/ChatCompletion/ChatCompletionRequest.cs).
Some of them are taken from this article: https://towardsdatascience.com/gpt-3-parameters-and-prompt-design-1a595dc5b405 \
Below listed parameters for ChatCompletions API.

Expand All @@ -73,8 +104,8 @@ The prediction-generating AI model is specified by the engine parameter. The ava
* `ChatCompletionModels.Gpt4_0314`: Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023. \*
* `ChatCompletionModels.Gpt4_32k`: Same capabilities as the base gpt-4 mode but with 4x the context length. Will be updated with OpenAI's latest model iteration. \*
* `ChatCompletionModels.Gpt4_32k_0314`: Snapshot of gpt-4-32 from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023. \* \
Note that training data for all models is up to Sep 2021. \
\* These models are currently in beta and are not yet available to all users. Here is the link for joining waitlist: https://openai.com/waitlist/gpt-4-api
Note that training data for all models is up to Sep 2021. \
\* These models are currently in beta and are not yet available to all users. Here is the link for joining waitlist: https://openai.com/waitlist/gpt-4-api

### MaxTokens
The maximum number of tokens allowed for the generated answer. Defaults to `ChatCompletionRequest.MaxTokensDefault` (64).
Expand All @@ -98,3 +129,43 @@ But, before applying the softmax function, we can use a trick inspired by thermo
A temperature parameter close to 1 would mean that the logits are passed through the softmax function without modification. If the temperature is close to zero, the highest probable tokens will become very likely compared to the other tokens, i.e. the model becomes more deterministic and will always output the same set of tokens after a given sequence of words.

More parameters description can be found here: Some of them are taken from this article: https://towardsdatascience.com/gpt-3-parameters-and-prompt-design-1a595dc5b405

# Raw client without DI
If you don't need DI and chat history, you can use only the NuGet package [OpenAI.ChatGPT](https://www.nuget.org/packages/OpenAI.ChatGPT):
```
Install-Package OpenAI.ChatGPT
```
Then create an instance of `OpenAIClient`:
```csharp
_client = new OpenAiClient("{YOUR_OPENAI_API_KEY}");
```

## Simple usage of the Chat Completions API (raw client)
```csharp
string text = "Who are you?";
string response = await _client.GetChatCompletions(new UserMessage(text), maxTokens: 80);
Console.WriteLine(response);
```

## Streaming response with async streams (like ChatGPT)
```csharp
var text = "Write the world top 3 songs of Soul genre";
await foreach (string chunk in _client.StreamChatCompletions(new UserMessage(text), maxTokens: 80))
{
Console.Write(chunk);
}
```

## Continue dialog with ChatGPT (message history)
Use `ThenAssistant` and `ThenUser` methods to create a dialog:
```csharp
var dialog = Dialog.StartAsUser("How many meters are in a kilometer? Write just the number.") //the message from user
.ThenAssistant("1000") // response from the assistant
.ThenUser("Convert it to hex. Write just the number."); // the next message from user
await foreach (var chunk in _client.StreamChatCompletions(dialog, maxTokens: 80))
{
Console.Write(chunk);
}
```
Or just send message history as a collection.
Binary file added assets/chatgpt_console_spectre_example.gif
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion samples/ChatGpt.BlazorExample/ChatGpt.BlazorExample.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="7.0.5" />
<PackageReference Include="OpenAI.ChatGPT.EntityFrameworkCore" Version="2.2.2" />
<PackageReference Include="OpenAI.ChatGPT.EntityFrameworkCore" Version="2.4.0" />
</ItemGroup>

</Project>
3 changes: 2 additions & 1 deletion samples/ChatGpt.BlazorExample/Pages/Index.razor
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ else
string _errorMessage = "";
bool _processing = false;
int _totalTokens = 0;
private Chat _chatService;
private ChatService? _chatService;
private string _userId = "test-user-id";

protected override async Task OnInitializedAsync()
Expand Down Expand Up @@ -167,6 +167,7 @@ else

async Task CallChatGpt()
{
if(_chatService is null) return;
try
{
// Set Processing to true to indicate that the method is processing
Expand Down

0 comments on commit 6316eea

Please sign in to comment.