Add host customizing feature to all constructors and factory methods; almost all public members are documented; Release 2.5.0
rodion-m committed Apr 28, 2023
1 parent 6ba197c commit 4f5ea42
Showing 15 changed files with 141 additions and 36 deletions.
20 changes: 12 additions & 8 deletions OpenAI.ChatGpt.AspNetCore/ChatGPTFactory.cs
@@ -22,8 +22,8 @@ public class ChatGPTFactory : IDisposable
private readonly ChatGPTConfig _config;
private readonly IChatHistoryStorage _chatHistoryStorage;
private readonly ITimeProvider _clock;
private bool _ensureStorageCreatedCalled;
private readonly bool _isHttpClientInjected;
private volatile bool _ensureStorageCreatedCalled;

public ChatGPTFactory(
IHttpClientFactory httpClientFactory,
@@ -58,10 +58,11 @@ public ChatGPTFactory(
string apiKey,
IChatHistoryStorage chatHistoryStorage,
ITimeProvider? clock = null,
ChatGPTConfig? config = null)
ChatGPTConfig? config = null,
string? host = null)
{
if (apiKey == null) throw new ArgumentNullException(nameof(apiKey));
_client = new OpenAiClient(apiKey);
ArgumentNullException.ThrowIfNull(apiKey);
_client = host is null ? new OpenAiClient(apiKey) : new OpenAiClient(apiKey, host);
_config = config ?? ChatGPTConfig.Default;
_chatHistoryStorage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
_clock = clock ?? new TimeProviderUtc();
@@ -77,10 +78,13 @@ private OpenAiClient CreateOpenAiClient(
return new OpenAiClient(httpClient);
}

public static ChatGPTFactory CreateInMemory(string apiKey, ChatGPTConfig? config = null)
public static ChatGPTFactory CreateInMemory(
string apiKey,
ChatGPTConfig? config = null,
string? host = null)
{
if (apiKey == null) throw new ArgumentNullException(nameof(apiKey));
return new ChatGPTFactory(apiKey, new InMemoryChatHistoryStorage(), new TimeProviderUtc(), config);
ArgumentNullException.ThrowIfNull(apiKey);
return new ChatGPTFactory(apiKey, new InMemoryChatHistoryStorage(), new TimeProviderUtc(), config, host);
}

public async Task<ChatGPT> Create(
@@ -89,7 +93,7 @@ public async Task<ChatGPT> Create(
bool ensureStorageCreated = true,
CancellationToken cancellationToken = default)
{
if (userId == null) throw new ArgumentNullException(nameof(userId));
ArgumentNullException.ThrowIfNull(userId);
if (ensureStorageCreated && !_ensureStorageCreatedCalled)
{
await _chatHistoryStorage.EnsureStorageCreated(cancellationToken);
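Usage sketch (not part of the diff): the new optional host parameter flows from CreateInMemory into the OpenAiClient that the factory builds. The namespace, the proxy URL, the placeholder key and the Create parameter order are assumptions for illustration.

using OpenAI.ChatGpt.AspNetCore; // assumed namespace of ChatGPTFactory

// In-memory factory pointed at a non-default endpoint; the host must be an
// absolute URI ending with '/'.
var factory = ChatGPTFactory.CreateInMemory(
    apiKey: "sk-...",                                 // placeholder key
    config: null,                                     // falls back to ChatGPTConfig.Default
    host: "https://my-openai-proxy.example.com/v1/"); // hypothetical proxy endpoint

// Create a ChatGPT instance for a user; storage is ensured on first use.
var chatGpt = await factory.Create("user-1");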
@@ -8,13 +8,13 @@ public static class ServiceCollectionExtensions
{
public const string CredentialsConfigSectionPathDefault = "OpenAICredentials";
// ReSharper disable once InconsistentNaming
public const string CchatGPTConfigSectionPathDefault = "ChatGPTConfig";
public const string ChatGPTConfigSectionPathDefault = "ChatGPTConfig";

public static IServiceCollection AddChatGptInMemoryIntegration(
this IServiceCollection services,
bool injectInMemoryChatService = true,
string credentialsConfigSectionPath = CredentialsConfigSectionPathDefault,
string completionsConfigSectionPath = CchatGPTConfigSectionPathDefault)
string completionsConfigSectionPath = ChatGPTConfigSectionPathDefault)
{
ArgumentNullException.ThrowIfNull(services);
if (string.IsNullOrWhiteSpace(credentialsConfigSectionPath))
@@ -61,7 +61,7 @@ private static ChatService CreateChatService(IServiceProvider provider)
public static IServiceCollection AddChatGptIntegrationCore(
this IServiceCollection services,
string credentialsConfigSectionPath = CredentialsConfigSectionPathDefault,
string completionsConfigSectionPath = CchatGPTConfigSectionPathDefault)
string completionsConfigSectionPath = ChatGPTConfigSectionPathDefault)
{
ArgumentNullException.ThrowIfNull(services);
if (string.IsNullOrWhiteSpace(credentialsConfigSectionPath))
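For context (not part of the diff), a minimal registration sketch using the corrected ChatGPTConfigSectionPathDefault constant; the section names are the library defaults and the hosting boilerplate is the standard ASP.NET Core template.

var builder = WebApplication.CreateBuilder(args);

// Binds OpenAICredentials from the "OpenAICredentials" section and
// ChatGPTConfig from the "ChatGPTConfig" section (both defaults), and
// registers the in-memory integration (including a ChatService when
// injectInMemoryChatService is true).
builder.Services.AddChatGptInMemoryIntegration(
    injectInMemoryChatService: true,
    credentialsConfigSectionPath: "OpenAICredentials",
    completionsConfigSectionPath: "ChatGPTConfig");

var app = builder.Build();
app.Run();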
7 changes: 6 additions & 1 deletion OpenAI.ChatGpt.AspNetCore/Models/OpenAICredentials.cs
@@ -8,14 +8,19 @@ namespace OpenAI.ChatGpt.AspNetCore.Models;
// ReSharper disable once InconsistentNaming
public class OpenAICredentials
{
private const string DefaultHost = "https://api.openai.com/v1/";

/// <summary>
/// OpenAI API key. Can be issued here: https://platform.openai.com/account/api-keys
/// </summary>
[Required]
public string ApiKey { get; set; }

/// <summary>
/// OpenAI API host. Default is: <see cref="DefaultHost"/>
/// </summary>
[Url]
public string ApiHost { get; set; } = "https://api.openai.com/v1/";
public string ApiHost { get; set; } = DefaultHost;

public AuthenticationHeaderValue GetAuthHeader()
{
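Illustrative only (not part of the diff): the credentials POCO with a non-default ApiHost. In the ASP.NET Core integration these values are normally bound from the OpenAICredentials configuration section rather than set in code; the endpoint below is a placeholder.

using OpenAI.ChatGpt.AspNetCore.Models;

var credentials = new OpenAICredentials
{
    ApiKey = "sk-...",                                  // placeholder key
    ApiHost = "https://my-openai-proxy.example.com/v1/" // overrides DefaultHost
};

// Authorization header value used when calling the API.
var authHeader = credentials.GetAuthHeader();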
2 changes: 1 addition & 1 deletion OpenAI.ChatGpt.AspNetCore/OpenAI.ChatGpt.AspNetCore.csproj
@@ -8,7 +8,7 @@
<PackageId>OpenAI.ChatGPT.AspNetCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with DI</Product>
<Version>2.4.2</Version>
<Version>2.5.0</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with easy DI supporting (Microsoft.Extensions.DependencyInjection). It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
@@ -13,7 +13,7 @@ public static IServiceCollection AddChatGptEntityFrameworkIntegration(
this IServiceCollection services,
Action<DbContextOptionsBuilder> optionsAction,
string credentialsConfigSectionPath = CredentialsConfigSectionPathDefault,
string completionsConfigSectionPath = CchatGPTConfigSectionPathDefault)
string completionsConfigSectionPath = ChatGPTConfigSectionPathDefault)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(optionsAction);
@@ -9,7 +9,7 @@
<PackageId>OpenAI.ChatGPT.EntityFrameworkCore</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET with EF Core storage</Product>
<Version>2.4.2</Version>
<Version>2.5.0</Version>
<Description>OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core supporting. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
40 changes: 34 additions & 6 deletions OpenAI.ChatGpt/ChatGPT.cs
@@ -16,6 +16,9 @@ public class ChatGPT : IDisposable
private readonly ChatGPTConfig? _config;
private readonly OpenAiClient _client;
private ChatService? _currentChat;

private static readonly string NoUser = Guid.Empty.ToString();
private readonly bool _isClientInjected;

/// <summary>
/// Use this constructor to create chat conversation provider for the specific user.
@@ -32,6 +35,7 @@ public ChatGPT(
_storage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
_clock = clock ?? throw new ArgumentNullException(nameof(clock));
_config = config;
_isClientInjected = true;
}

/// <summary>
@@ -46,8 +50,25 @@ public ChatGPT(
_client = client ?? throw new ArgumentNullException(nameof(client));
_storage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
_clock = clock ?? throw new ArgumentNullException(nameof(clock));
_userId = Guid.Empty.ToString();
_userId = NoUser;
_config = config;
_isClientInjected = true;
}

public ChatGPT(
string apiKey,
IChatHistoryStorage chatHistoryStorage,
ITimeProvider clock,
string? userId,
ChatGPTConfig? config,
string? host)
{
_client = host is null ? new OpenAiClient(apiKey) : new OpenAiClient(apiKey, host);
_userId = userId ?? NoUser;
_storage = chatHistoryStorage ?? throw new ArgumentNullException(nameof(chatHistoryStorage));
_clock = clock ?? throw new ArgumentNullException(nameof(clock));
_config = config;
_isClientInjected = false;
}

/// <summary>
@@ -57,17 +78,24 @@ public static Task<ChatService> CreateInMemoryChat(
string apiKey,
ChatGPTConfig? config = null,
UserOrSystemMessage? initialDialog = null,
ITimeProvider? clock = null)
ITimeProvider? clock = null,
string? host = null,
CancellationToken cancellationToken = default)
{
if (apiKey == null) throw new ArgumentNullException(nameof(apiKey));
var client = new OpenAiClient(apiKey);
var chatGpt = new ChatGPT(client, new InMemoryChatHistoryStorage(), clock ?? new TimeProviderUtc(), config);
return chatGpt.StartNewTopic(initialDialog: initialDialog);
ArgumentNullException.ThrowIfNull(apiKey);
var chatGpt = new ChatGPT(
apiKey, new InMemoryChatHistoryStorage(), clock ?? new TimeProviderUtc(), null, config, host);
return chatGpt.StartNewTopic(initialDialog: initialDialog, cancellationToken: cancellationToken);
}

public void Dispose()
{
Stop();
_currentChat?.Dispose();
if (!_isClientInjected)
{
_client.Dispose();
}
}

/// <summary> Continues the last topic or starts a new one.</summary>
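A quick sketch of the extended CreateInMemoryChat signature (not part of the diff); the key, host and namespaces are illustrative assumptions. Note that when ChatGPT builds its own OpenAiClient, as here, Dispose() now also disposes that client.

using OpenAI.ChatGpt;
using OpenAI.ChatGpt.Models; // assumed namespace of ChatGPTConfig

// Returns a ChatService backed by InMemoryChatHistoryStorage.
var chat = await ChatGPT.CreateInMemoryChat(
    apiKey: "sk-...",                                 // placeholder key
    config: new ChatGPTConfig { MaxTokens = 256 },
    host: "https://my-openai-proxy.example.com/v1/"); // hypothetical endpoint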
6 changes: 2 additions & 4 deletions OpenAI.ChatGpt/Models/ChatCompletion/ChatCompletionModels.cs
@@ -19,20 +19,18 @@ public static class ChatCompletionModels
public const string Default = Gpt3_5_Turbo;

/// <summary>
/// IMPORTANT: This model is available only by request. Link for joining waitlist: https://openai.com/waitlist/gpt-4-api
/// More capable than any GPT-3.5 model, able to do more complex tasks, and optimized for chat.
/// Will be updated with OpenAI's latest model iteration.
/// </summary>
[Obsolete("This model is available only by request. " +
"Link for joining waitlist: https://openai.com/waitlist/gpt-4-api")]
public const string Gpt4 = "gpt-4";

/// <summary>
/// IMPORTANT: This model is available only by request. Link for joining waitlist: https://openai.com/waitlist/gpt-4-api
/// Snapshot of gpt-4 from March 14th 2023.
/// Unlike gpt-4, this model will not receive updates,
/// and will only be supported for a three month period ending on June 14th 2023.
/// </summary>
[Obsolete("This model is available only by request. " +
"Link for joining waitlist: https://openai.com/waitlist/gpt-4-api")]
public const string Gpt4_0314 = "gpt-4-0314";

/// <summary>
@@ -88,7 +88,7 @@ public float Temperature
/// <summary>
/// The maximum number of tokens allowed for the generated answer.
/// Defaults to <see cref="MaxTokensDefault"/>.
/// This value is validated and limited with <see cref="ChatCompletionModels.GetMaxTokensLimitForModel"/> meghod.
/// This value is validated and limited with <see cref="ChatCompletionModels.GetMaxTokensLimitForModel"/> method.
/// It's possible to calculate approximately tokens count using <see cref="ChatCompletionMessage.CalculateApproxTotalTokenCount()"/> method.
/// </summary>
/// <remarks>
70 changes: 67 additions & 3 deletions OpenAI.ChatGpt/Models/ChatGPTConfig.cs
@@ -7,6 +7,7 @@ namespace OpenAI.ChatGpt.Models;
// ReSharper disable once InconsistentNaming
public class ChatGPTConfig
{
/// <summary>Default configuration.</summary>
public static ChatGPTConfig Default => new()
{
PassUserIdToOpenAiRequests = true
@@ -16,9 +17,57 @@ public class ChatGPTConfig
private string? _model;
private float? _temperature;

/// <summary>
/// This is a system message that will be sent to the OpenAI API as the first message.
/// It is the initial dialog to start with and allows you to fine-tune the chatbot (a message from the system).
/// </summary>
/// <example>
/// English teacher prompt:
/// ```
/// I want you to act as an English translator, spelling corrector and improver.
/// I will speak to you in any language and you will detect the language,
/// translate it and answer in the corrected and improved version of my text, in English.
/// I want you to replace my simplified A0-level words and sentences with
/// more beautiful and elegant, upper level English words and sentences.
/// Keep the meaning same, but make them more literary.
/// I want you to only reply the correction, the improvements and nothing else,
/// do not write explanations.
/// My first sentence is “istanbulu cok seviyom burada olmak cok guzel”
/// ```
/// See more prompts here: https://prompts.chat/
/// </example>
/// <remarks>
/// If <see cref="InitialSystemMessage"/> and <see cref="InitialUserMessage"/> are provided,
/// then both messages will be sent to the OpenAI API.
/// More info about initial message: https://github.com/openai/openai-python/blob/main/chatml.md
/// </remarks>
public string? InitialSystemMessage { get; set; }

/// <summary>
/// This is a user message that will be sent to the OpenAI API as the first message.
/// It is the initial dialog to start with and allows you to fine-tune the chatbot (a message from the user).
/// See also: <see cref="InitialSystemMessage"/>.
/// </summary>
/// <remarks>
/// If <see cref="InitialSystemMessage"/> and <see cref="InitialUserMessage"/> are provided,
/// then both messages will be sent to the OpenAI API.
/// More info about initial message: https://github.com/openai/openai-python/blob/main/chatml.md
/// </remarks>
public string? InitialUserMessage { get; set; }

/// <summary>
/// The maximum number of tokens allowed for the generated answer.
/// Defaults to <see cref="ChatCompletionRequest.MaxTokensDefault"/>.
/// This value is validated and limited with <see cref="ChatCompletionModels.GetMaxTokensLimitForModel"/> method.
/// It's possible to calculate the approximate token count using the <see cref="ChatCompletionMessage.CalculateApproxTotalTokenCount()"/> method.
/// Maps to: <see cref="ChatCompletionRequest.MaxTokens"/>
/// </summary>
/// <remarks>
/// The number of tokens can be retrieved from the API response: <see cref="ChatCompletionResponse.Usage"/>
/// As a rule of thumb for English, 1 token is around 4 characters (so 100 tokens ≈ 75 words).
/// See: https://platform.openai.com/tokenizer
/// Encoding algorithm can be found here: https://github.com/latitudegames/GPT-3-Encoder
/// </remarks>
public int? MaxTokens
{
get => _maxTokens;
@@ -42,6 +91,7 @@ public int? MaxTokens

/// <summary>
/// ID of the model to use. One of: <see cref="ChatCompletionModels"/>
/// Maps to: <see cref="ChatCompletionRequest.Model"/>
/// </summary>
public string? Model
{
@@ -54,6 +104,7 @@ public string? Model
/// Higher values like 0.8 will make the output more random,
/// while lower values like 0.2 will make it more focused and deterministic.
/// Predefined values: <see cref="ChatCompletionTemperatures"/>
/// Maps to: <see cref="ChatCompletionRequest.Temperature"/>
/// </summary>
[Range(ChatCompletionTemperatures.Minimum, ChatCompletionTemperatures.Maximum)]
public float? Temperature
@@ -62,9 +113,19 @@ public float? Temperature
set => _temperature = value is { } temp ? ChatCompletionTemperatures.Custom(temp) : null;
}

/// <summary>
/// Whether to include the user ID in OpenAI requests.
/// See also: <see cref="ChatCompletionRequest.User"/>
/// </summary>
/// <remarks>
/// More info about users: https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids
/// </remarks>
public bool? PassUserIdToOpenAiRequests { get; set; }

public UserOrSystemMessage? GetInitialDialogOrNull()
/// <summary>
/// Returns the initial dialog to start with, which allows fine-tuning the chatbot.
/// </summary>
public virtual UserOrSystemMessage? GetInitialDialogOrNull()
{
return (InitialSystemMessage, InitialUserMessage) switch
{
@@ -110,8 +171,11 @@ internal void ModifyRequest(ChatCompletionRequest request)
return result;
}

public static ChatGPTConfig CombineOrDefault(
ChatGPTConfig? baseConfig, ChatGPTConfig? config)
/// <summary>
/// Merges two <see cref="ChatGPTConfig"/>s with respect to <paramref name="config"/>.
/// If both <paramref name="baseConfig"/> and <paramref name="config"/> are null, then returns <see cref="Default"/>.
/// </summary>
public static ChatGPTConfig CombineOrDefault(ChatGPTConfig? baseConfig, ChatGPTConfig? config)
{
return Combine(baseConfig, config) ?? Default;
}
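A configuration sketch that ties the newly documented members together (not part of the diff); the namespaces, model choice and numeric values are illustrative.

using OpenAI.ChatGpt.Models;                // namespace of ChatGPTConfig
using OpenAI.ChatGpt.Models.ChatCompletion; // assumed namespace of ChatCompletionModels

var config = new ChatGPTConfig
{
    InitialSystemMessage = "I want you to act as an English translator, " +
                           "spelling corrector and improver.",
    Model = ChatCompletionModels.Default, // gpt-3.5-turbo at the time of this release
    MaxTokens = 512,                      // validated against the model's token limit
    Temperature = 0.2f,                   // lower values are more focused and deterministic
    PassUserIdToOpenAiRequests = true
};

// Merge two configs; falls back to Default when both arguments are null.
var effective = ChatGPTConfig.CombineOrDefault(ChatGPTConfig.Default, config);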
2 changes: 1 addition & 1 deletion OpenAI.ChatGpt/OpenAI.ChatGpt.csproj
@@ -10,7 +10,7 @@
<PackageId>OpenAI.ChatGPT</PackageId>
<PackageProjectUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</PackageProjectUrl>
<Product>OpenAI ChatGPT integration for .NET</Product>
<Version>2.4.2</Version>
<Version>2.5.0</Version>
<Description>.NET integration for ChatGPT with streaming responses supporting (like ChatGPT) via async streams.</Description>
<RepositoryUrl>https://github.com/rodion-m/ChatGPT_API_dotnet</RepositoryUrl>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
10 changes: 7 additions & 3 deletions OpenAI.ChatGpt/OpenAIClient.cs
@@ -37,7 +37,8 @@ public OpenAiClient(string apiKey, string host = DefaultHost)
throw new ArgumentException("Value cannot be null or whitespace.", nameof(apiKey));
ArgumentNullException.ThrowIfNull(host);
if(!Uri.TryCreate(host, UriKind.Absolute, out _) || !host.EndsWith('/'))
throw new ArgumentException("Host must be a valid absolute URI and end with a slash.", nameof(host));
throw new ArgumentException("Host must be a valid absolute URI and end with a slash." +
$"For example: {DefaultHost}", nameof(host));
_httpClient = new HttpClient()
{
BaseAddress = new Uri(host)
@@ -86,7 +87,10 @@ private static void ValidateHttpClient(HttpClient httpClient)

public void Dispose()
{
if (!_isHttpClientInjected) _httpClient.Dispose();
if (!_isHttpClientInjected)
{
_httpClient.Dispose();
}
}

public async Task<string> GetChatCompletions(
@@ -170,7 +174,7 @@ internal async Task<ChatCompletionResponse> GetChatCompletionsRaw(
options: _nullIgnoreSerializerOptions
);
var responseContent = await response.Content.ReadAsStringAsync(cancellationToken);

if (!response.IsSuccessStatusCode)
{
throw new NotExpectedResponseException(response.StatusCode, responseContent);
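To illustrate the validation behind the improved exception message (not part of the diff; the endpoint is a placeholder): the host must be an absolute URI ending with a slash, and a client constructed from an API key owns its HttpClient, so Dispose() releases it, unlike the HttpClient-injected constructor.

using OpenAI.ChatGpt; // assumed namespace of OpenAiClient

// OK: absolute URI with a trailing slash.
using var client = new OpenAiClient("sk-...", "https://my-openai-proxy.example.com/v1/");

// Throws ArgumentException, now mentioning DefaultHost as an example:
// var bad = new OpenAiClient("sk-...", "https://my-openai-proxy.example.com/v1");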
2 changes: 1 addition & 1 deletion samples/ChatGpt.BlazorExample/ChatGpt.BlazorExample.csproj
@@ -9,7 +9,7 @@

<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="7.0.5" />
<PackageReference Include="OpenAI.ChatGPT.EntityFrameworkCore" Version="2.4.0" />
<PackageReference Include="OpenAI.ChatGPT.EntityFrameworkCore" Version="2.4.1" />
</ItemGroup>

</Project>