From 5f023b53ad7ca3e4f20ceac4daf65408447a2fe9 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 20 Jun 2024 19:06:55 +0100
Subject: [PATCH 001/226] .Net OpenAI SDK V2 - Phase 00 (Feature Branch)
(#6894)
Empty projects created so that the following changes can land in small
PRs.
This pull request primarily includes changes to the .NET project files
and the solution file, introducing new projects and updating package
dependencies; the most significant changes are listed in the diffstat
below.
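For reference, central package management entries in
`Directory.Packages.props` take the shape sketched below. The two lines
added here are presumably the new OpenAI SDK packages, but the actual
package IDs and versions are not recoverable from this diff; the entries
below are illustrative placeholders only.

```xml
<Project>
  <PropertyGroup>
    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
  </PropertyGroup>
  <ItemGroup>
    <!-- Hypothetical entries; the actual IDs/versions were lost in extraction. -->
    <PackageVersion Include="OpenAI" Version="2.0.0-beta.5" />
    <PackageVersion Include="Azure.AI.OpenAI" Version="2.0.0-beta.2" />
  </ItemGroup>
</Project>
```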
---
dotnet/Directory.Packages.props | 2 +
dotnet/SK-dotnet.sln | 36 ++++++++++
dotnet/samples/ConceptsV2/ConceptsV2.csproj | 72 +++++++++++++++++++
.../Connectors.OpenAIV2.UnitTests.csproj | 39 ++++++++++
.../Connectors.OpenAIV2.csproj | 34 +++++++++
.../IntegrationTestsV2.csproj | 67 +++++++++++++++++
6 files changed, 250 insertions(+)
create mode 100644 dotnet/samples/ConceptsV2/ConceptsV2.csproj
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
create mode 100644 dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index d514e22cb5f4..146311afca6f 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -5,6 +5,8 @@
true
+
+
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 2d11481810cb..9f09181e3846 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -314,6 +314,14 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TimePlugin", "samples\Demos
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.AzureCosmosDBNoSQL", "src\Connectors\Connectors.Memory.AzureCosmosDBNoSQL\Connectors.Memory.AzureCosmosDBNoSQL.csproj", "{B0B3901E-AF56-432B-8FAA-858468E5D0DF}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.OpenAIV2", "src\Connectors\Connectors.OpenAIV2\Connectors.OpenAIV2.csproj", "{8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.OpenAIV2.UnitTests", "src\Connectors\Connectors.OpenAIV2.UnitTests\Connectors.OpenAIV2.UnitTests.csproj", "{A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConceptsV2", "samples\ConceptsV2\ConceptsV2.csproj", "{932B6B93-C297-47BE-A061-081ACC6105FB}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IntegrationTestsV2", "src\IntegrationTestsV2\IntegrationTestsV2.csproj", "{FDEB4884-89B9-4656-80A0-57C7464490F7}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -771,6 +779,30 @@ Global
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Publish|Any CPU.Build.0 = Publish|Any CPU
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Release|Any CPU.Build.0 = Release|Any CPU
+ {932B6B93-C297-47BE-A061-081ACC6105FB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {932B6B93-C297-47BE-A061-081ACC6105FB}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {932B6B93-C297-47BE-A061-081ACC6105FB}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {932B6B93-C297-47BE-A061-081ACC6105FB}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {932B6B93-C297-47BE-A061-081ACC6105FB}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {932B6B93-C297-47BE-A061-081ACC6105FB}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FDEB4884-89B9-4656-80A0-57C7464490F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {FDEB4884-89B9-4656-80A0-57C7464490F7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FDEB4884-89B9-4656-80A0-57C7464490F7}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {FDEB4884-89B9-4656-80A0-57C7464490F7}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {FDEB4884-89B9-4656-80A0-57C7464490F7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {FDEB4884-89B9-4656-80A0-57C7464490F7}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -877,6 +909,10 @@ Global
{1D3EEB5B-0E06-4700-80D5-164956E43D0A} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{F312FCE1-12D7-4DEF-BC29-2FF6618509F3} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{B0B3901E-AF56-432B-8FAA-858468E5D0DF} = {24503383-A8C4-4255-9998-28D70FE8E99A}
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+ {A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+ {932B6B93-C297-47BE-A061-081ACC6105FB} = {FA3720F1-C99A-49B2-9577-A940257098BF}
+ {FDEB4884-89B9-4656-80A0-57C7464490F7} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/samples/ConceptsV2/ConceptsV2.csproj b/dotnet/samples/ConceptsV2/ConceptsV2.csproj
new file mode 100644
index 000000000000..a9fe41232166
--- /dev/null
+++ b/dotnet/samples/ConceptsV2/ConceptsV2.csproj
@@ -0,0 +1,72 @@
+
+
+
+ Concepts
+
+ net8.0
+ enable
+ false
+ true
+
+ $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110
+ Library
+ 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+ Always
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj
new file mode 100644
index 000000000000..046b5999bee6
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj
@@ -0,0 +1,39 @@
+
+
+
+ SemanticKernel.Connectors.OpenAI.UnitTests
+ $(AssemblyName)
+ net8.0
+ true
+ enable
+ false
+ $(NoWarn);SKEXP0001;SKEXP0070;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
new file mode 100644
index 000000000000..3e51e9674e21
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
@@ -0,0 +1,34 @@
+
+
+
+
+ Microsoft.SemanticKernel.Connectors.OpenAI
+ $(AssemblyName)
+ net8.0;netstandard2.0
+ true
+ $(NoWarn);NU5104;SKEXP0001,SKEXP0010
+ <EnablePackageValidation>true</EnablePackageValidation>
+
+
+
+
+
+
+
+
+ Semantic Kernel - OpenAI and Azure OpenAI connectors
+ Semantic Kernel connectors for OpenAI and Azure OpenAI. Contains clients for text generation, chat completion, embedding and DALL-E text to image.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj b/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj
new file mode 100644
index 000000000000..cbfbfe9e4df3
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj
@@ -0,0 +1,67 @@
+
+
+ IntegrationTests
+ SemanticKernel.IntegrationTests
+ net8.0
+ true
+ false
+ $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0110
+ b7762d10-e29b-4bb1-8b74-b6d69a667dd4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+
+
+
+ Always
+
+
+
\ No newline at end of file
From 00f80bc21278650749fffa22a3f6498e9c267ed2 Mon Sep 17 00:00:00 2001
From: SergeyMenshykh
Date: Thu, 20 Jun 2024 22:23:34 +0100
Subject: [PATCH 002/226] feat: add empty AzureOpenAI and AzureOpenAI.UnitTests
projects.
---
dotnet/SK-dotnet.sln | 20 ++++++++-
.../Connectors.AzureOpenAI.UnitTests.csproj | 41 +++++++++++++++++++
.../Connectors.AzureOpenAI.csproj | 34 +++++++++++++++
3 files changed, 94 insertions(+), 1 deletion(-)
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 9f09181e3846..e87e6db29e1b 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -320,7 +320,11 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.OpenAIV2.UnitTes
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConceptsV2", "samples\ConceptsV2\ConceptsV2.csproj", "{932B6B93-C297-47BE-A061-081ACC6105FB}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IntegrationTestsV2", "src\IntegrationTestsV2\IntegrationTestsV2.csproj", "{FDEB4884-89B9-4656-80A0-57C7464490F7}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "IntegrationTestsV2", "src\IntegrationTestsV2\IntegrationTestsV2.csproj", "{FDEB4884-89B9-4656-80A0-57C7464490F7}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureOpenAI", "src\Connectors\Connectors.AzureOpenAI\Connectors.AzureOpenAI.csproj", "{6744272E-8326-48CE-9A3F-6BE227A5E777}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureOpenAI.UnitTests", "src\Connectors\Connectors.AzureOpenAI.UnitTests\Connectors.AzureOpenAI.UnitTests.csproj", "{DB219924-208B-4CDD-8796-EE424689901E}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -803,6 +807,18 @@ Global
{FDEB4884-89B9-4656-80A0-57C7464490F7}.Publish|Any CPU.Build.0 = Debug|Any CPU
{FDEB4884-89B9-4656-80A0-57C7464490F7}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FDEB4884-89B9-4656-80A0-57C7464490F7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Release|Any CPU.Build.0 = Release|Any CPU
+ {DB219924-208B-4CDD-8796-EE424689901E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {DB219924-208B-4CDD-8796-EE424689901E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {DB219924-208B-4CDD-8796-EE424689901E}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {DB219924-208B-4CDD-8796-EE424689901E}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {DB219924-208B-4CDD-8796-EE424689901E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {DB219924-208B-4CDD-8796-EE424689901E}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -913,6 +929,8 @@ Global
{A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
{932B6B93-C297-47BE-A061-081ACC6105FB} = {FA3720F1-C99A-49B2-9577-A940257098BF}
{FDEB4884-89B9-4656-80A0-57C7464490F7} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0}
+ {6744272E-8326-48CE-9A3F-6BE227A5E777} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+ {DB219924-208B-4CDD-8796-EE424689901E} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj
new file mode 100644
index 000000000000..703061c403a2
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj
@@ -0,0 +1,41 @@
+
+
+
+
+ SemanticKernel.Connectors.AzureOpenAI.UnitTests
+ $(AssemblyName)
+ net8.0
+ true
+ enable
+ false
+ $(NoWarn);SKEXP0001;SKEXP0010;CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
new file mode 100644
index 000000000000..837dd5b3c1db
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
@@ -0,0 +1,34 @@
+
+
+
+
+ Microsoft.SemanticKernel.Connectors.AzureOpenAI
+ $(AssemblyName)
+ net8.0;netstandard2.0
+ true
+ $(NoWarn);NU5104;SKEXP0001,SKEXP0010
+ <EnablePackageValidation>false</EnablePackageValidation>
+
+
+
+
+
+
+
+
+ Semantic Kernel - Azure OpenAI connectors
+ Semantic Kernel connectors for Azure OpenAI. Contains clients for text generation, chat completion, embedding and DALL-E text to image.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
From 5cd0a2809c725788cbf8317ab1b8461bd6af7dfa Mon Sep 17 00:00:00 2001
From: SergeyMenshykh
Date: Thu, 20 Jun 2024 23:25:14 +0100
Subject: [PATCH 003/226] fix: temporarily disable package validation
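For context, the switch being flipped is the `EnablePackageValidation`
MSBuild property; a minimal sketch of the relevant csproj fragment
(surrounding properties elided):

```xml
<PropertyGroup>
  <!-- Temporarily disabled for the new V2 connector project. -->
  <EnablePackageValidation>false</EnablePackageValidation>
</PropertyGroup>
```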
---
.../Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
index 3e51e9674e21..d5e129765dc9 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
@@ -7,7 +7,7 @@
net8.0;netstandard2.0
true
$(NoWarn);NU5104;SKEXP0001,SKEXP0010
- <EnablePackageValidation>true</EnablePackageValidation>
+ <EnablePackageValidation>false</EnablePackageValidation>
From 58523951d3c1b7b3e3cda36c23d2a3cc9b872ce0 Mon Sep 17 00:00:00 2001
From: SergeyMenshykh
Date: Thu, 20 Jun 2024 23:39:47 +0100
Subject: [PATCH 004/226] fix: publish configuration for the OpenAIV2 project
---
dotnet/SK-dotnet.sln | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index e87e6db29e1b..79f0e6bb5596 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -785,8 +785,8 @@ Global
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Release|Any CPU.Build.0 = Release|Any CPU
{8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
- {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Publish|Any CPU.ActiveCfg = Publish|Any CPU
+ {8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Publish|Any CPU.Build.0 = Publish|Any CPU
{8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8AC4D976-BBBA-44C7-9CFD-567F0B4751D8}.Release|Any CPU.Build.0 = Release|Any CPU
{A2E659A5-0CE5-4CBF-B9F6-F8604B2AF0BF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
From e7632f00e2c399ea239418254c0b47ab2737e462 Mon Sep 17 00:00:00 2001
From: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com>
Date: Thu, 20 Jun 2024 16:02:36 -0700
Subject: [PATCH 005/226] .Net: Empty projects for the new AzureOpenAI
connector (#6900)
Empty Connectors.AzureOpenAI and Connectors.AzureOpenAI.UnitTests
projects, as a first step toward building the AzureOpenAI connector on
the new Azure OpenAI SDK.
Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
---
dotnet/SK-dotnet.sln | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 79f0e6bb5596..01ffff52057a 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -809,8 +809,8 @@ Global
{FDEB4884-89B9-4656-80A0-57C7464490F7}.Release|Any CPU.Build.0 = Release|Any CPU
{6744272E-8326-48CE-9A3F-6BE227A5E777}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{6744272E-8326-48CE-9A3F-6BE227A5E777}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {6744272E-8326-48CE-9A3F-6BE227A5E777}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
- {6744272E-8326-48CE-9A3F-6BE227A5E777}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Publish|Any CPU.ActiveCfg = Publish|Any CPU
+ {6744272E-8326-48CE-9A3F-6BE227A5E777}.Publish|Any CPU.Build.0 = Publish|Any CPU
{6744272E-8326-48CE-9A3F-6BE227A5E777}.Release|Any CPU.ActiveCfg = Release|Any CPU
{6744272E-8326-48CE-9A3F-6BE227A5E777}.Release|Any CPU.Build.0 = Release|Any CPU
{DB219924-208B-4CDD-8796-EE424689901E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
From af19aa707569348b2e3e3d6e57a5918271be576c Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Mon, 24 Jun 2024 09:37:31 +0100
Subject: [PATCH 006/226] .Net OpenAI SDK V2 - Phase 01 Embeddings + ClientCore
(Feature Branch) (#6898)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## ClientCore + Foundation
This PR is the first in the series and lays down the foundational
structure and classes for the V2 OpenAI Connector.
In this PR I also used the simpler `TextEmbeddings` service to wrap up a
vertical slice of Service + Client + unit tests + integration tests +
dependencies, which also validates the proposed structure of folders and
namespaces for internal and public components.
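As a usage sketch of that vertical slice (the model name and key are
placeholders; the service type and its signature are the ones added in
the diff below):

```csharp
using System;
using System.Collections.Generic;
using Microsoft.SemanticKernel.Connectors.OpenAI;

// Placeholders for a real model id and API key.
var service = new OpenAITextEmbeddingGenerationService("text-embedding-3-small", "api-key");

// One ReadOnlyMemory<float> vector is returned per input string.
IList<ReadOnlyMemory<float>> embeddings = await service.GenerateEmbeddingsAsync(["Hello world"]);
```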
### ClientCore
As part of this PR I'm also taking advantage of the `partial` keyword,
dividing the `ClientCore` implementation into one file per service. The
original V1 ClientCore file was very big; specific files per
service/modality make it simpler and easier to maintain, as the sketch
below shows.
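A minimal sketch of the split, assuming the per-service file layout
described above (the stubbed body is illustrative; the real one is in
the diff below):

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

// ClientCore.cs - shared state, constructors, and plumbing.
internal partial class ClientCore
{
    internal string ModelId { get; init; } = string.Empty;
}

// ClientCore.Embeddings.cs - only the embeddings-specific surface.
internal partial class ClientCore
{
    internal Task<IList<ReadOnlyMemory<float>>> GetEmbeddingsAsync(
        IList<string> data, Kernel? kernel, int? dimensions, CancellationToken cancellationToken)
        => throw new NotImplementedException(); // real implementation in the diff below
}
```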
## What Changed
This change migrates from the previous `Azure.Core` pipeline
abstractions to the new `System.ClientModel` abstractions, which the
`OpenAI` package uses.
This includes updating or adding the files below (see the policy sketch
after this list):
- AddHeaderRequestPolicy - Adapted from the previous
`AddHeaderRequestPolicy`
- ClientResultExceptionExtensions - Adapted from the previous
`RequestExceptionExtensions`
- OpenAIClientCore - Merged into ClientCore (no more need for
specialized Azure and OpenAI clients)
- ClientCore - Updated internals to just what Text Embeddings needs;
`OpenAIClientCore` was merged into this class, which is no longer
`abstract`.
- OpenAITextEmbbedingGenerationService - Updated to use `ClientCore`
directly instead of `OpenAIClientCore`.
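A minimal sketch of how a synchronous header policy plugs into the
`System.ClientModel` pipeline (`StampHeaderPolicy` and the header values
are illustrative; the real adapted policy is `AddHeaderRequestPolicy` in
the diff below):

```csharp
using System.ClientModel;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Threading.Tasks;
using OpenAI;

// Register the policy so it runs once per call made through the client.
var options = new OpenAIClientOptions();
options.AddPolicy(new StampHeaderPolicy("X-Example", "example-value"), PipelinePosition.PerCall);
var client = new OpenAIClient(new ApiKeyCredential("api-key"), options);

// Minimal policy that stamps one header on every outgoing request.
internal sealed class StampHeaderPolicy(string name, string value) : PipelinePolicy
{
    public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
    {
        message.Request.Headers.Add(name, value);
        ProcessNext(message, pipeline, currentIndex);
    }

    public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
    {
        message.Request.Headers.Add(name, value);
        await ProcessNextAsync(message, pipeline, currentIndex).ConfigureAwait(false);
    }
}
```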
## What's New
- [PipelineSynchronousPolicy -
Azure.Core/src/Pipeline/HttpPipelineSynchronousPolicy.cs](https://github.com/Azure/azure-sdk-for-net/blob/8bd22837639d54acccc820e988747f8d28bbde4a/sdk/core/Azure.Core/src/Pipeline/HttpPipelineSynchronousPolicy.cs#L18)
This file didn't exist here and had to be added, as it is a dependency
of `AddHeaderRequestPolicy`
- Mocks added for `System.ClientModel` pipeline testing
- Unit tests covering:
  - ClientCore
  - OpenAITextEmbeddingsGenerationService
  - AddHeadersRequestPolicy
  - PipelineSynchronousPolicy
  - ClientResultExceptionExtensions
- Integration tests:
  - OpenAITextEmbeddingsGenerationService (moved from V1)
## What was Removed
- OpenAIClientCore - This class was merged into ClientCore
- CustomHostPipelinePolicy - Removed, as the new OpenAI SDK supports
non-default OpenAI endpoints (see the endpoint sketch below).
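A sketch of why that policy is no longer needed: the new SDK exposes the
endpoint directly on the client options (the endpoint value is
illustrative):

```csharp
using System;
using System.ClientModel;
using OpenAI;

// Point the client at a non-default, OpenAI-compatible endpoint.
var options = new OpenAIClientOptions { Endpoint = new Uri("http://localhost:1234/v1") };
var client = new OpenAIClient(new ApiKeyCredential("api-key"), options);
```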
## Unit & Integration Tests
Unlike V1, this PR focuses on individual unit tests for the OpenAI
connector only.
With a target of above 80% code coverage, the unit tests cover
Services + Clients + Extensions & Utilities.
The folder structure and tested components in the unit tests follow the
same structure defined in the project under test.
---
.../Connectors.OpenAIV2.UnitTests.csproj | 16 +-
.../Core/ClientCoreTests.cs | 188 ++++++++++++++++++
.../Models/AddHeaderRequestPolicyTests.cs | 43 ++++
.../Models/PipelineSynchronousPolicyTests.cs | 56 ++++++
.../ClientResultExceptionExtensionsTests.cs | 73 +++++++
...enAITextEmbeddingGenerationServiceTests.cs | 86 ++++++++
.../text-embeddings-multiple-response.txt | 20 ++
.../TestData/text-embeddings-response.txt | 15 ++
.../Utils/MockPipelineResponse.cs | 156 +++++++++++++++
.../Utils/MockResponseHeaders.cs | 37 ++++
.../Connectors.OpenAIV2.csproj | 1 +
.../Core/ClientCore.Embeddings.cs | 64 ++++++
.../Connectors.OpenAIV2/Core/ClientCore.cs | 187 +++++++++++++++++
.../Core/Models/AddHeaderRequestPolicy.cs | 23 +++
.../Core/Models/PipelineSynchronousPolicy.cs | 89 +++++++++
.../ClientResultExceptionExtensions.cs | 44 ++++
.../OpenAITextEmbbedingGenerationService.cs | 85 ++++++++
dotnet/src/IntegrationTestsV2/.editorconfig | 6 +
.../OpenAI/OpenAITextEmbeddingTests.cs | 63 ++++++
.../IntegrationTestsV2.csproj | 8 +-
.../TestSettings/OpenAIConfiguration.cs | 15 ++
21 files changed, 1268 insertions(+), 7 deletions(-)
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-multiple-response.txt
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-response.txt
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockPipelineResponse.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockResponseHeaders.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/ClientResultExceptionExtensions.cs
create mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs
create mode 100644 dotnet/src/IntegrationTestsV2/.editorconfig
create mode 100644 dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs
create mode 100644 dotnet/src/IntegrationTestsV2/TestSettings/OpenAIConfiguration.cs
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj
index 046b5999bee6..0d89e02beb21 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Connectors.OpenAIV2.UnitTests.csproj
@@ -1,4 +1,4 @@
-
+
SemanticKernel.Connectors.OpenAI.UnitTests
@@ -7,7 +7,7 @@
true
enable
false
- $(NoWarn);SKEXP0001;SKEXP0070;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111
+ $(NoWarn);SKEXP0001;SKEXP0070;SKEXP0010;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111
@@ -29,11 +29,21 @@
-
+
+
+
+
+
+ Always
+
+
+ Always
+
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs
new file mode 100644
index 000000000000..a3415663459a
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs
@@ -0,0 +1,188 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Http;
+using Moq;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core;
+public partial class ClientCoreTests
+{
+ [Fact]
+ public void ItCanBeInstantiatedAndPropertiesSetAsExpected()
+ {
+ // Act
+ var logger = new Mock<ILogger>().Object;
+ var openAIClient = new OpenAIClient(new ApiKeyCredential("key"));
+
+ var clientCoreModelConstructor = new ClientCore("model1", "apiKey");
+ var clientCoreOpenAIClientConstructor = new ClientCore("model1", openAIClient, logger: logger);
+
+ // Assert
+ Assert.NotNull(clientCoreModelConstructor);
+ Assert.NotNull(clientCoreOpenAIClientConstructor);
+
+ Assert.Equal("model1", clientCoreModelConstructor.ModelId);
+ Assert.Equal("model1", clientCoreOpenAIClientConstructor.ModelId);
+
+ Assert.NotNull(clientCoreModelConstructor.Client);
+ Assert.NotNull(clientCoreOpenAIClientConstructor.Client);
+ Assert.Equal(openAIClient, clientCoreOpenAIClientConstructor.Client);
+ Assert.Equal(NullLogger.Instance, clientCoreModelConstructor.Logger);
+ Assert.Equal(logger, clientCoreOpenAIClientConstructor.Logger);
+ }
+
+ [Theory]
+ [InlineData(null, null)]
+ [InlineData("http://localhost", null)]
+ [InlineData(null, "http://localhost")]
+ [InlineData("http://localhost-1", "http://localhost-2")]
+ public void ItUsesEndpointAsExpected(string? clientBaseAddress, string? providedEndpoint)
+ {
+ // Arrange
+ Uri? endpoint = null;
+ HttpClient? client = null;
+ if (providedEndpoint is not null)
+ {
+ endpoint = new Uri(providedEndpoint);
+ }
+
+ if (clientBaseAddress is not null)
+ {
+ client = new HttpClient { BaseAddress = new Uri(clientBaseAddress) };
+ }
+
+ // Act
+ var clientCore = new ClientCore("model", "apiKey", endpoint: endpoint, httpClient: client);
+
+ // Assert
+ Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/v1"), clientCore.Endpoint);
+
+ client?.Dispose();
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task ItAddOrganizationHeaderWhenProvidedAsync(bool organizationIdProvided)
+ {
+ using HttpMessageHandlerStub handler = new();
+ using HttpClient client = new(handler);
+ handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK);
+
+ // Act
+ var clientCore = new ClientCore(
+ modelId: "model",
+ apiKey: "test",
+ organizationId: (organizationIdProvided) ? "organization" : null,
+ httpClient: client);
+
+ var pipelineMessage = clientCore.Client.Pipeline.CreateMessage();
+ pipelineMessage.Request.Method = "POST";
+ pipelineMessage.Request.Uri = new Uri("http://localhost");
+ pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test"));
+
+ // Assert
+ await clientCore.Client.Pipeline.SendAsync(pipelineMessage);
+
+ if (organizationIdProvided)
+ {
+ Assert.True(handler.RequestHeaders!.Contains("OpenAI-Organization"));
+ Assert.Equal("organization", handler.RequestHeaders.GetValues("OpenAI-Organization").FirstOrDefault());
+ }
+ else
+ {
+ Assert.False(handler.RequestHeaders!.Contains("OpenAI-Organization"));
+ }
+ }
+
+ [Fact]
+ public async Task ItAddSemanticKernelHeadersOnEachRequestAsync()
+ {
+ using HttpMessageHandlerStub handler = new();
+ using HttpClient client = new(handler);
+ handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK);
+
+ // Act
+ var clientCore = new ClientCore(modelId: "model", apiKey: "test", httpClient: client);
+
+ var pipelineMessage = clientCore.Client.Pipeline.CreateMessage();
+ pipelineMessage.Request.Method = "POST";
+ pipelineMessage.Request.Uri = new Uri("http://localhost");
+ pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test"));
+
+ // Assert
+ await clientCore.Client.Pipeline.SendAsync(pipelineMessage);
+
+ Assert.True(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion));
+ Assert.Equal(HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore)), handler.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).FirstOrDefault());
+
+ Assert.True(handler.RequestHeaders.Contains("User-Agent"));
+ Assert.Contains(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault());
+ }
+
+ [Fact]
+ public async Task ItDoNotAddSemanticKernelHeadersWhenOpenAIClientIsProvidedAsync()
+ {
+ using HttpMessageHandlerStub handler = new();
+ using HttpClient client = new(handler);
+ handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK);
+
+ // Act
+ var clientCore = new ClientCore(
+ modelId: "model",
+ openAIClient: new OpenAIClient(
+ new ApiKeyCredential("test"),
+ new OpenAIClientOptions()
+ {
+ Transport = new HttpClientPipelineTransport(client),
+ RetryPolicy = new ClientRetryPolicy(maxRetries: 0),
+ NetworkTimeout = Timeout.InfiniteTimeSpan
+ }));
+
+ var pipelineMessage = clientCore.Client.Pipeline.CreateMessage();
+ pipelineMessage.Request.Method = "POST";
+ pipelineMessage.Request.Uri = new Uri("http://localhost");
+ pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test"));
+
+ // Assert
+ await clientCore.Client.Pipeline.SendAsync(pipelineMessage);
+
+ Assert.False(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion));
+ Assert.DoesNotContain(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault());
+ }
+
+ [Theory]
+ [InlineData(null)]
+ [InlineData("")]
+ [InlineData("value")]
+ public void ItAddAttributesButDoesNothingIfNullOrEmpty(string? value)
+ {
+ // Arrange
+ var clientCore = new ClientCore("model", "apikey");
+ // Act
+
+ clientCore.AddAttribute("key", value);
+
+ // Assert
+ if (string.IsNullOrEmpty(value))
+ {
+ Assert.False(clientCore.Attributes.ContainsKey("key"));
+ }
+ else
+ {
+ Assert.True(clientCore.Attributes.ContainsKey("key"));
+ Assert.Equal(value, clientCore.Attributes["key"]);
+ }
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs
new file mode 100644
index 000000000000..83ec6a20568d
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs
@@ -0,0 +1,43 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel.Primitives;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core.Models;
+
+public class AddHeaderRequestPolicyTests
+{
+ [Fact]
+ public void ItCanBeInstantiated()
+ {
+ // Arrange
+ var headerName = "headerName";
+ var headerValue = "headerValue";
+
+ // Act
+ var addHeaderRequestPolicy = new AddHeaderRequestPolicy(headerName, headerValue);
+
+ // Assert
+ Assert.NotNull(addHeaderRequestPolicy);
+ }
+
+ [Fact]
+ public void ItOnSendingRequestAddsHeaderToRequest()
+ {
+ // Arrange
+ var headerName = "headerName";
+ var headerValue = "headerValue";
+ var addHeaderRequestPolicy = new AddHeaderRequestPolicy(headerName, headerValue);
+ var pipeline = ClientPipeline.Create();
+ var message = pipeline.CreateMessage();
+
+ // Act
+ addHeaderRequestPolicy.OnSendingRequest(message);
+
+ // Assert
+ message.Request.Headers.TryGetValue(headerName, out var value);
+ Assert.NotNull(value);
+ Assert.Equal(headerValue, value);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs
new file mode 100644
index 000000000000..cae4b32b4283
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs
@@ -0,0 +1,56 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core.Models;
+public class PipelineSynchronousPolicyTests
+{
+ [Fact]
+ public async Task ItProcessAsyncWhenSpecializationHasReceivedResponseOverrideShouldCallIt()
+ {
+ // Arrange
+ var first = new MyHttpPipelinePolicyWithoutOverride();
+ var last = new MyHttpPipelinePolicyWithOverride();
+
+ IReadOnlyList<PipelinePolicy> policies = [first, last];
+
+ // Act
+ await policies[0].ProcessAsync(ClientPipeline.Create().CreateMessage(), policies, 0);
+
+ // Assert
+ Assert.True(first.CalledProcess);
+ Assert.True(last.CalledProcess);
+ Assert.True(last.CalledOnReceivedResponse);
+ }
+
+ private class MyHttpPipelinePolicyWithoutOverride : PipelineSynchronousPolicy
+ {
+ public bool CalledProcess { get; private set; }
+
+ public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ this.CalledProcess = true;
+ base.Process(message, pipeline, currentIndex);
+ }
+
+ public override ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ this.CalledProcess = true;
+ return base.ProcessAsync(message, pipeline, currentIndex);
+ }
+ }
+
+ private sealed class MyHttpPipelinePolicyWithOverride : MyHttpPipelinePolicyWithoutOverride
+ {
+ public bool CalledOnReceivedResponse { get; private set; }
+
+ public override void OnReceivedResponse(PipelineMessage message)
+ {
+ this.CalledOnReceivedResponse = true;
+ }
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs
new file mode 100644
index 000000000000..0b95f904d893
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions;
+
+public class ClientResultExceptionExtensionsTests
+{
+ [Fact]
+ public void ItCanRecoverFromResponseErrorAndConvertsToHttpOperationExceptionWithDefaultData()
+ {
+ // Arrange
+ var exception = new ClientResultException("message", ClientPipeline.Create().CreateMessage().Response);
+
+ // Act
+ var httpOperationException = exception.ToHttpOperationException();
+
+ // Assert
+ Assert.NotNull(httpOperationException);
+ Assert.Equal(exception, httpOperationException.InnerException);
+ Assert.Equal(exception.Message, httpOperationException.Message);
+ Assert.Null(httpOperationException.ResponseContent);
+ Assert.Null(httpOperationException.StatusCode);
+ }
+
+ [Fact]
+ public void ItCanProvideResponseContentAndStatusCode()
+ {
+ // Arrange
+ using var pipelineResponse = new MockPipelineResponse();
+
+ pipelineResponse.SetContent("content");
+ pipelineResponse.SetStatus(200);
+
+ var exception = new ClientResultException("message", pipelineResponse);
+
+ // Act
+ var httpOperationException = exception.ToHttpOperationException();
+
+ // Assert
+ Assert.NotNull(httpOperationException);
+ Assert.NotNull(httpOperationException.StatusCode);
+ Assert.Equal(exception, httpOperationException.InnerException);
+ Assert.Equal(exception.Message, httpOperationException.Message);
+ Assert.Equal(pipelineResponse.Content.ToString(), httpOperationException.ResponseContent);
+ Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!);
+ }
+
+ [Fact]
+ public void ItProvideStatusForResponsesWithoutContent()
+ {
+ // Arrange
+ using var pipelineResponse = new MockPipelineResponse();
+
+ pipelineResponse.SetStatus(200);
+
+ var exception = new ClientResultException("message", pipelineResponse);
+
+ // Act
+ var httpOperationException = exception.ToHttpOperationException();
+
+ // Assert
+ Assert.NotNull(httpOperationException);
+ Assert.NotNull(httpOperationException.StatusCode);
+ Assert.Empty(httpOperationException.ResponseContent!);
+ Assert.Equal(exception, httpOperationException.InnerException);
+ Assert.Equal(exception.Message, httpOperationException.Message);
+ Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs
new file mode 100644
index 000000000000..25cdc4ec61aa
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs
@@ -0,0 +1,86 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel;
+using System.IO;
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Services;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services;
+public class OpenAITextEmbeddingGenerationServiceTests
+{
+ [Fact]
+ public void ItCanBeInstantiatedAndPropertiesSetAsExpected()
+ {
+ // Arrange
+ var sut = new OpenAITextEmbeddingGenerationService("model", "apiKey", dimensions: 2);
+ var sutWithOpenAIClient = new OpenAITextEmbeddingGenerationService("model", new OpenAIClient(new ApiKeyCredential("apiKey")), dimensions: 2);
+
+ // Assert
+ Assert.NotNull(sut);
+ Assert.NotNull(sutWithOpenAIClient);
+ Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]);
+ Assert.Equal("model", sutWithOpenAIClient.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public async Task ItGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsEmpty()
+ {
+ // Arrange
+ var sut = new OpenAITextEmbeddingGenerationService("model", "apikey");
+
+ // Act
+ var result = await sut.GenerateEmbeddingsAsync([], null, CancellationToken.None);
+
+ // Assert
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public async Task IGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace()
+ {
+ using HttpMessageHandlerStub handler = new()
+ {
+ ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt"))
+ }
+ };
+ using HttpClient client = new(handler);
+
+ // Arrange
+ var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client);
+
+ // Act
+ var result = await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None);
+
+ // Assert
+ Assert.Single(result);
+ Assert.Equal(4, result[0].Length);
+ }
+
+ [Fact]
+ public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync()
+ {
+ using HttpMessageHandlerStub handler = new()
+ {
+ ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-multiple-response.txt"))
+ }
+ };
+ using HttpClient client = new(handler);
+
+ // Arrange
+ var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client);
+
+ // Act & Assert
+ await Assert.ThrowsAsync<KernelException>(async () => await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None));
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-multiple-response.txt b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-multiple-response.txt
new file mode 100644
index 000000000000..46a9581cf0cc
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-multiple-response.txt
@@ -0,0 +1,20 @@
+{
+ "object": "list",
+ "data": [
+ {
+ "object": "embedding",
+ "index": 0,
+ "embedding": "zcyMP83MDEAzM1NAzcyMQA=="
+ },
+ {
+ "object": "embedding",
+ "index": 1,
+ "embedding": "zcyMP83MDEAzM1NAzcyMQA=="
+ }
+ ],
+ "model": "text-embedding-ada-002",
+ "usage": {
+ "prompt_tokens": 7,
+ "total_tokens": 7
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-response.txt b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-response.txt
new file mode 100644
index 000000000000..c715b851b78c
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-embeddings-response.txt
@@ -0,0 +1,15 @@
+{
+ "object": "list",
+ "data": [
+ {
+ "object": "embedding",
+ "index": 0,
+ "embedding": "zcyMP83MDEAzM1NAzcyMQA=="
+ }
+ ],
+ "model": "text-embedding-ada-002",
+ "usage": {
+ "prompt_tokens": 7,
+ "total_tokens": 7
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockPipelineResponse.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockPipelineResponse.cs
new file mode 100644
index 000000000000..6fe18b9c1684
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockPipelineResponse.cs
@@ -0,0 +1,156 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/* Phase 01
+This class was imported and adapted from the System.ClientModel Unit Tests.
+https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/System.ClientModel/tests/TestFramework/Mocks/MockPipelineResponse.cs
+*/
+
+using System;
+using System.ClientModel.Primitives;
+using System.IO;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests;
+
+public class MockPipelineResponse : PipelineResponse
+{
+ private int _status;
+ private string _reasonPhrase;
+ private Stream? _contentStream;
+ private BinaryData? _bufferedContent;
+
+ private readonly PipelineResponseHeaders _headers;
+
+ private bool _disposed;
+
+ public MockPipelineResponse(int status = 0, string reasonPhrase = "")
+ {
+ this._status = status;
+ this._reasonPhrase = reasonPhrase;
+ this._headers = new MockResponseHeaders();
+ }
+
+ public override int Status => this._status;
+
+ public void SetStatus(int value) => this._status = value;
+
+ public override string ReasonPhrase => this._reasonPhrase;
+
+ public void SetReasonPhrase(string value) => this._reasonPhrase = value;
+
+ public void SetContent(byte[] content)
+ {
+ this.ContentStream = new MemoryStream(content, 0, content.Length, false, true);
+ }
+
+ public MockPipelineResponse SetContent(string content)
+ {
+ this.SetContent(Encoding.UTF8.GetBytes(content));
+ return this;
+ }
+
+ public override Stream? ContentStream
+ {
+ get => this._contentStream;
+ set => this._contentStream = value;
+ }
+
+ public override BinaryData Content
+ {
+ get
+ {
+ if (this._contentStream is null)
+ {
+ return new BinaryData(Array.Empty<byte>());
+ }
+
+ if (this.ContentStream is not MemoryStream memoryContent)
+ {
+ throw new InvalidOperationException("The response is not buffered.");
+ }
+
+ if (memoryContent.TryGetBuffer(out ArraySegment<byte> segment))
+ {
+ return new BinaryData(segment.AsMemory());
+ }
+ return new BinaryData(memoryContent.ToArray());
+ }
+ }
+
+ protected override PipelineResponseHeaders HeadersCore
+ => this._headers;
+
+ public sealed override void Dispose()
+ {
+ this.Dispose(true);
+
+ GC.SuppressFinalize(this);
+ }
+
+ protected void Dispose(bool disposing)
+ {
+ if (disposing && !this._disposed)
+ {
+ Stream? content = this._contentStream;
+ if (content != null)
+ {
+ this._contentStream = null;
+ content.Dispose();
+ }
+
+ this._disposed = true;
+ }
+ }
+
+ public override BinaryData BufferContent(CancellationToken cancellationToken = default)
+ {
+ if (this._bufferedContent is not null)
+ {
+ return this._bufferedContent;
+ }
+
+ if (this._contentStream is null)
+ {
+ this._bufferedContent = new BinaryData(Array.Empty<byte>());
+ return this._bufferedContent;
+ }
+
+ MemoryStream bufferStream = new();
+ this._contentStream.CopyTo(bufferStream);
+ this._contentStream.Dispose();
+ this._contentStream = bufferStream;
+
+ // Less efficient FromStream method called here because it is a mock.
+ // For intended production implementation, see HttpClientTransportResponse.
+ this._bufferedContent = BinaryData.FromStream(bufferStream);
+ return this._bufferedContent;
+ }
+
+ public override async ValueTask BufferContentAsync(CancellationToken cancellationToken = default)
+ {
+ if (this._bufferedContent is not null)
+ {
+ return this._bufferedContent;
+ }
+
+ if (this._contentStream is null)
+ {
+ this._bufferedContent = new BinaryData(Array.Empty<byte>());
+ return this._bufferedContent;
+ }
+
+ MemoryStream bufferStream = new();
+
+ await this._contentStream.CopyToAsync(bufferStream, cancellationToken).ConfigureAwait(false);
+ await this._contentStream.DisposeAsync().ConfigureAwait(false);
+
+ this._contentStream = bufferStream;
+
+ // Less efficient FromStream method called here because it is a mock.
+ // For intended production implementation, see HttpClientTransportResponse.
+ this._bufferedContent = BinaryData.FromStream(bufferStream);
+ return this._bufferedContent;
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockResponseHeaders.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockResponseHeaders.cs
new file mode 100644
index 000000000000..fceef64e4bae
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Utils/MockResponseHeaders.cs
@@ -0,0 +1,37 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/* Phase 01
+This class was imported and adapted from the System.ClientModel Unit Tests.
+https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/System.ClientModel/tests/TestFramework/Mocks/MockResponseHeaders.cs
+*/
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests;
+
+public class MockResponseHeaders : PipelineResponseHeaders
+{
+ private readonly Dictionary<string, string> _headers;
+
+ public MockResponseHeaders()
+ {
+ this._headers = new Dictionary<string, string>();
+ }
+
+ public override IEnumerator<KeyValuePair<string, string>> GetEnumerator()
+ {
+ throw new NotImplementedException();
+ }
+
+ public override bool TryGetValue(string name, out string? value)
+ {
+ return this._headers.TryGetValue(name, out value);
+ }
+
+ public override bool TryGetValues(string name, out IEnumerable<string>? values)
+ {
+ throw new NotImplementedException();
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
index d5e129765dc9..b17b14eb91ef 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
@@ -30,5 +30,6 @@
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs
new file mode 100644
index 000000000000..d11e2799addd
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs
@@ -0,0 +1,64 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/*
+Phase 01
+
+This class was created to carry over, in simplified form, the Text Embeddings support from the v1 ClientCore.
+*/
+
+using System;
+using System.ClientModel;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using OpenAI.Embeddings;
+
+#pragma warning disable CA2208 // Instantiate argument exceptions correctly
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// Base class for AI clients that provides common functionality for interacting with OpenAI services.
+/// </summary>
+internal partial class ClientCore
+{
+ /// <summary>
+ /// Generates an embedding from the given <paramref name="data"/>.
+ /// </summary>
+ /// <param name="data">List of strings to generate embeddings for</param>
+ /// <param name="kernel">The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+ /// <returns>List of embeddings</returns>
+ internal async Task<IList<ReadOnlyMemory<float>>> GetEmbeddingsAsync(
+ IList<string> data,
+ Kernel? kernel,
+ int? dimensions,
+ CancellationToken cancellationToken)
+ {
+ var result = new List<ReadOnlyMemory<float>>(data.Count);
+
+ if (data.Count > 0)
+ {
+ var embeddingsOptions = new EmbeddingGenerationOptions()
+ {
+ Dimensions = dimensions
+ };
+
+ ClientResult<EmbeddingCollection> response = await RunRequestAsync(() => this.Client.GetEmbeddingClient(this.ModelId).GenerateEmbeddingsAsync(data, embeddingsOptions, cancellationToken)).ConfigureAwait(false);
+ var embeddings = response.Value;
+
+ if (embeddings.Count != data.Count)
+ {
+ throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}");
+ }
+
+ for (var i = 0; i < embeddings.Count; i++)
+ {
+ result.Add(embeddings[i].Vector);
+ }
+ }
+
+ return result;
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
new file mode 100644
index 000000000000..12ca2f3d92fe
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
@@ -0,0 +1,187 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/*
+Phase 01 : This class was created adapting and merging ClientCore and OpenAIClientCore classes.
+System.ClientModel changes were added and adapted to the code as this package is now used as a dependency over OpenAI package.
+All logic from original ClientCore and OpenAIClientCore were preserved.
+*/
+
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.SemanticKernel.Http;
+using OpenAI;
+
+#pragma warning disable CA2208 // Instantiate argument exceptions correctly
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// Base class for AI clients that provides common functionality for interacting with OpenAI services.
+/// </summary>
+internal partial class ClientCore
+{
+ /// <summary>
+ /// Default OpenAI API endpoint.
+ /// </summary>
+ private const string OpenAIV1Endpoint = "https://api.openai.com/v1";
+
+ /// <summary>
+ /// Identifier of the default model to use
+ /// </summary>
+ internal string ModelId { get; init; } = string.Empty;
+
+ /// <summary>
+ /// Non-default endpoint for OpenAI API.
+ /// </summary>
+ internal Uri? Endpoint { get; init; }
+
+ /// <summary>
+ /// Logger instance
+ /// </summary>
+ internal ILogger Logger { get; init; }
+
+ /// <summary>
+ /// OpenAI / Azure OpenAI Client
+ /// </summary>
+ internal OpenAIClient Client { get; }
+
+ /// <summary>
+ /// Storage for AI service attributes.
+ /// </summary>
+ internal Dictionary<string, object?> Attributes { get; } = [];
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="ClientCore"/> class.
+ /// </summary>
+ /// <param name="modelId">Model name.</param>
+ /// <param name="apiKey">OpenAI API Key.</param>
+ /// <param name="endpoint">OpenAI compatible API endpoint.</param>
+ /// <param name="organizationId">OpenAI Organization Id (usually optional).</param>
+ /// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
+ /// <param name="logger">The <see cref="ILogger"/> to use for logging. If null, no logging will be performed.</param>
+ internal ClientCore(
+ string modelId,
+ string? apiKey = null,
+ Uri? endpoint = null,
+ string? organizationId = null,
+ HttpClient? httpClient = null,
+ ILogger? logger = null)
+ {
+ Verify.NotNullOrWhiteSpace(modelId);
+
+ this.Logger = logger ?? NullLogger.Instance;
+ this.ModelId = modelId;
+
+ // Accepts the endpoint if provided, otherwise uses the default OpenAI endpoint.
+ this.Endpoint = endpoint ?? httpClient?.BaseAddress;
+ if (this.Endpoint is null)
+ {
+ Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided.
+ this.Endpoint = new Uri(OpenAIV1Endpoint);
+ }
+
+ var options = GetOpenAIClientOptions(httpClient, this.Endpoint);
+ if (!string.IsNullOrWhiteSpace(organizationId))
+ {
+ options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organizationId!), PipelinePosition.PerCall);
+ }
+
+ this.Client = new OpenAIClient(apiKey ?? string.Empty, options);
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="ClientCore"/> class using the specified OpenAIClient.
+ /// Note: instances created this way might not have the default diagnostics settings,
+ /// it's up to the caller to configure the client.
+ /// </summary>
+ /// <param name="modelId">Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
+ /// <param name="openAIClient">Custom <see cref="OpenAIClient"/>.</param>
+ /// <param name="logger">The <see cref="ILogger"/> to use for logging. If null, no logging will be performed.</param>
+ internal ClientCore(
+ string modelId,
+ OpenAIClient openAIClient,
+ ILogger? logger = null)
+ {
+ Verify.NotNullOrWhiteSpace(modelId);
+ Verify.NotNull(openAIClient);
+
+ this.Logger = logger ?? NullLogger.Instance;
+ this.ModelId = modelId;
+ this.Client = openAIClient;
+ }
+
+ /// <summary>
+ /// Logs OpenAI action details.
+ /// </summary>
+ /// <param name="callerMemberName">Caller member name. Populated automatically by runtime.</param>
+ internal void LogActionDetails([CallerMemberName] string? callerMemberName = default)
+ {
+ if (this.Logger.IsEnabled(LogLevel.Information))
+ {
+ this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.ModelId);
+ }
+ }
+
+ /// <summary>
+ /// Allows adding attributes to the client.
+ /// </summary>
+ /// <param name="key">Attribute key.</param>
+ /// <param name="value">Attribute value.</param>
+ internal void AddAttribute(string key, string? value)
+ {
+ if (!string.IsNullOrEmpty(value))
+ {
+ this.Attributes.Add(key, value);
+ }
+ }
+
+ /// <summary>Gets options to use for an OpenAIClient</summary>
+ /// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
+ /// <param name="endpoint">Endpoint for the OpenAI API.</param>
+ /// <returns>An instance of <see cref="OpenAIClientOptions"/>.</returns>
+ private static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient, Uri? endpoint)
+ {
+ OpenAIClientOptions options = new()
+ {
+ ApplicationId = HttpHeaderConstant.Values.UserAgent,
+ Endpoint = endpoint
+ };
+
+ options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), PipelinePosition.PerCall);
+
+ if (httpClient is not null)
+ {
+ options.Transport = new HttpClientPipelineTransport(httpClient);
+ options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided.
+ options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout
+ }
+
+ return options;
+ }
+
+ /// <summary>
+ /// Invokes the specified request and handles exceptions.
+ /// </summary>
+ /// <typeparam name="T">Type of the response.</typeparam>
+ /// <param name="request">Request to invoke.</param>
+ /// <returns>Returns the response.</returns>
+ private static async Task<T> RunRequestAsync<T>(Func<Task<T>> request)
+ {
+ try
+ {
+ return await request.Invoke().ConfigureAwait(false);
+ }
+ catch (ClientResultException e)
+ {
+ throw e.ToHttpOperationException();
+ }
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs
new file mode 100644
index 000000000000..2279d639c54e
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs
@@ -0,0 +1,23 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/* Phase 1
+Added from OpenAI v1 with adapted logic to the System.ClientModel abstraction
+*/
+
+using System.ClientModel.Primitives;
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// Helper class to inject headers into the System.ClientModel HTTP pipeline
+/// </summary>
+internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : PipelineSynchronousPolicy
+{
+ private readonly string _headerName = headerName;
+ private readonly string _headerValue = headerValue;
+
+ public override void OnSendingRequest(PipelineMessage message)
+ {
+ message.Request.Headers.Add(this._headerName, this._headerValue);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs
new file mode 100644
index 000000000000..b7690ead8b7f
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs
@@ -0,0 +1,89 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/*
+Phase 1
+As System.ClientModel does not have any specialization or extension point for this at the moment, this class was introduced, adapted from the Azure.Core original to use the System.ClientModel abstractions.
+https://github.com/Azure/azure-sdk-for-net/blob/8bd22837639d54acccc820e988747f8d28bbde4a/sdk/core/Azure.Core/src/Pipeline/HttpPipelineSynchronousPolicy.cs
+*/
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Reflection;
+using System.Threading.Tasks;
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// Represents a <see cref="PipelinePolicy"/> that doesn't do any asynchronous or synchronously blocking operations.
+/// </summary>
+internal class PipelineSynchronousPolicy : PipelinePolicy
+{
+ private static readonly Type[] s_onReceivedResponseParameters = new[] { typeof(PipelineMessage) };
+
+ private readonly bool _hasOnReceivedResponse = true;
+
+ /// <summary>
+ /// Initializes a new instance of <see cref="PipelineSynchronousPolicy"/>
+ /// </summary>
+ protected PipelineSynchronousPolicy()
+ {
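+ // Use reflection to detect whether a derived type overrides OnReceivedResponse; when it
+ // doesn't, ProcessAsync can return the pipeline task directly and skip the async state machine.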
+ var onReceivedResponseMethod = this.GetType().GetMethod(nameof(OnReceivedResponse), BindingFlags.Instance | BindingFlags.Public, null, s_onReceivedResponseParameters, null);
+ if (onReceivedResponseMethod != null)
+ {
+ this._hasOnReceivedResponse = onReceivedResponseMethod.GetBaseDefinition().DeclaringType != onReceivedResponseMethod.DeclaringType;
+ }
+ }
+
+ /// <inheritdoc/>
+ public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ this.OnSendingRequest(message);
+ if (pipeline.Count > currentIndex + 1)
+ {
+ // If there are more policies in the pipeline, continue processing
+ ProcessNext(message, pipeline, currentIndex);
+ }
+ this.OnReceivedResponse(message);
+ }
+
+ /// <inheritdoc/>
+ public override ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ if (!this._hasOnReceivedResponse)
+ {
+ // If OnReceivedResponse was not overridden we can avoid creating a state machine and return the task directly
+ this.OnSendingRequest(message);
+ if (pipeline.Count > currentIndex + 1)
+ {
+ // If there are more policies in the pipeline, continue processing
+ return ProcessNextAsync(message, pipeline, currentIndex);
+ }
+ }
+
+ return this.InnerProcessAsync(message, pipeline, currentIndex);
+ }
+
+ private async ValueTask InnerProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ this.OnSendingRequest(message);
+ if (pipeline.Count > currentIndex + 1)
+ {
+ // If there are more policies in the pipeline, continue processing
+ await ProcessNextAsync(message, pipeline, currentIndex).ConfigureAwait(false);
+ }
+ this.OnReceivedResponse(message);
+ }
+
+ /// <summary>
+ /// Method is invoked before the request is sent.
+ /// </summary>
+ /// <param name="message">The <see cref="PipelineMessage"/> containing the request.</param>
+ public virtual void OnSendingRequest(PipelineMessage message) { }
+
+ /// <summary>
+ /// Method is invoked after the response is received.
+ /// </summary>
+ /// <param name="message">The <see cref="PipelineMessage"/> containing the response.</param>
+ public virtual void OnReceivedResponse(PipelineMessage message) { }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/ClientResultExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/ClientResultExceptionExtensions.cs
new file mode 100644
index 000000000000..7da92e5826ba
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/ClientResultExceptionExtensions.cs
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/*
+Phase 01:
+This class is introduced in exchange for the original RequestFailedExceptionExtensions class of Azure.Core, adapted to the new ClientResultException from System.ClientModel.
+The logic is preserved as is.
+*/
+
+using System.ClientModel;
+using System.Net;
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// Provides extension methods for the <see cref="ClientResultException"/> class.
+/// </summary>
+internal static class ClientResultExceptionExtensions
+{
+ /// <summary>
+ /// Converts a <see cref="ClientResultException"/> to an <see cref="HttpOperationException"/>.
+ /// </summary>
+ /// <param name="exception">The original <see cref="ClientResultException"/>.</param>
+ /// <returns>An <see cref="HttpOperationException"/> instance.</returns>
+ public static HttpOperationException ToHttpOperationException(this ClientResultException exception)
+ {
+ const int NoResponseReceived = 0;
+
+ string? responseContent = null;
+
+ try
+ {
+ responseContent = exception.GetRawResponse()?.Content.ToString();
+ }
+#pragma warning disable CA1031 // Do not catch general exception types
+ catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead.
+#pragma warning restore CA1031
+
+ return new HttpOperationException(
+ exception.Status == NoResponseReceived ? null : (HttpStatusCode?)exception.Status,
+ responseContent,
+ exception.Message,
+ exception);
+ }
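+
+ // Illustrative usage (mirrors ClientCore.RunRequestAsync introduced in this change):
+ //
+ // try { return await request.Invoke().ConfigureAwait(false); }
+ // catch (ClientResultException e) { throw e.ToHttpOperationException(); }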
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs
new file mode 100644
index 000000000000..49915031b7fc
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs
@@ -0,0 +1,85 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Services;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// OpenAI implementation of <see cref="ITextEmbeddingGenerationService"/>
+/// </summary>
+[Experimental("SKEXP0010")]
+public sealed class OpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService
+{
+ private readonly ClientCore _core;
+ private readonly int? _dimensions;
+
+ /// <summary>
+ /// Create an instance of <see cref="OpenAITextEmbeddingGenerationService"/>
+ /// </summary>
+ /// <param name="modelId">Model name</param>
+ /// <param name="apiKey">OpenAI API Key</param>
+ /// <param name="organization">OpenAI Organization Id (usually optional)</param>
+ /// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
+ /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ public OpenAITextEmbeddingGenerationService(
+ string modelId,
+ string apiKey,
+ string? organization = null,
+ HttpClient? httpClient = null,
+ ILoggerFactory? loggerFactory = null,
+ int? dimensions = null)
+ {
+ this._core = new(
+ modelId: modelId,
+ apiKey: apiKey,
+ organizationId: organization,
+ httpClient: httpClient,
+ logger: loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService)));
+
+ this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+
+ this._dimensions = dimensions;
+ }
+
+ /// <summary>
+ /// Create an instance of the OpenAI text embedding connector
+ /// </summary>
+ /// <param name="modelId">Model name</param>
+ /// <param name="openAIClient">Custom <see cref="OpenAIClient"/> for HTTP requests.</param>
+ /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ public OpenAITextEmbeddingGenerationService(
+ string modelId,
+ OpenAIClient openAIClient,
+ ILoggerFactory? loggerFactory = null,
+ int? dimensions = null)
+ {
+ this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService)));
+ this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+
+ this._dimensions = dimensions;
+ }
+
+ /// <inheritdoc/>
+ public IReadOnlyDictionary<string, object?> Attributes => this._core.Attributes;
+
+ /// <inheritdoc/>
+ public Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingsAsync(
+ IList<string> data,
+ Kernel? kernel = null,
+ CancellationToken cancellationToken = default)
+ {
+ this._core.LogActionDetails();
+ return this._core.GetEmbeddingsAsync(data, kernel, this._dimensions, cancellationToken);
+ }
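+
+ // Illustrative usage (model name and key are placeholders):
+ //
+ // var service = new OpenAITextEmbeddingGenerationService("text-embedding-3-small", apiKey);
+ // ReadOnlyMemory<float> vector = await service.GenerateEmbeddingAsync("sample text");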
+}
diff --git a/dotnet/src/IntegrationTestsV2/.editorconfig b/dotnet/src/IntegrationTestsV2/.editorconfig
new file mode 100644
index 000000000000..394eef685f21
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/.editorconfig
@@ -0,0 +1,6 @@
+# Suppressing errors for Test projects under dotnet folder
+[*.cs]
+dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task
+dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave
+dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member
+dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations
diff --git a/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs
new file mode 100644
index 000000000000..6eca1909a546
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.OpenAI;
+
+public sealed class OpenAITextEmbeddingTests
+{
+ private const int AdaVectorLength = 1536;
+ private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets<OpenAITextEmbeddingTests>()
+ .Build();
+
+ [Theory]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [InlineData("test sentence")]
+ public async Task OpenAITestAsync(string testInputString)
+ {
+ // Arrange
+ OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAIEmbeddings").Get<OpenAIConfiguration>();
+ Assert.NotNull(openAIConfiguration);
+
+ var embeddingGenerator = new OpenAITextEmbeddingGenerationService(openAIConfiguration.ModelId, openAIConfiguration.ApiKey);
+
+ // Act
+ var singleResult = await embeddingGenerator.GenerateEmbeddingAsync(testInputString);
+ var batchResult = await embeddingGenerator.GenerateEmbeddingsAsync([testInputString, testInputString, testInputString]);
+
+ // Assert
+ Assert.Equal(AdaVectorLength, singleResult.Length);
+ Assert.Equal(3, batchResult.Count);
+ }
+
+ [Theory]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [InlineData(null, 3072)]
+ [InlineData(1024, 1024)]
+ public async Task OpenAIWithDimensionsAsync(int? dimensions, int expectedVectorLength)
+ {
+ // Arrange
+ const string TestInputString = "test sentence";
+
+ OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAIEmbeddings").Get<OpenAIConfiguration>();
+ Assert.NotNull(openAIConfiguration);
+
+ var embeddingGenerator = new OpenAITextEmbeddingGenerationService(
+ "text-embedding-3-large",
+ openAIConfiguration.ApiKey,
+ dimensions: dimensions);
+
+ // Act
+ var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString);
+
+ // Assert
+ Assert.Equal(expectedVectorLength, result.Length);
+ }
+}
diff --git a/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj b/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj
index cbfbfe9e4df3..f3c704a27307 100644
--- a/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj
+++ b/dotnet/src/IntegrationTestsV2/IntegrationTestsV2.csproj
@@ -1,7 +1,7 @@
-
+
 <AssemblyName>IntegrationTests</AssemblyName>
- <RootNamespace>SemanticKernel.IntegrationTests</RootNamespace>
+ <RootNamespace>SemanticKernel.IntegrationTestsV2</RootNamespace>
 <TargetFramework>net8.0</TargetFramework>
true
false
@@ -16,7 +16,7 @@
-
+
@@ -44,7 +44,6 @@
-
@@ -64,4 +63,5 @@
Always
+
\ No newline at end of file
diff --git a/dotnet/src/IntegrationTestsV2/TestSettings/OpenAIConfiguration.cs b/dotnet/src/IntegrationTestsV2/TestSettings/OpenAIConfiguration.cs
new file mode 100644
index 000000000000..cb3884e3bdfc
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/TestSettings/OpenAIConfiguration.cs
@@ -0,0 +1,15 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics.CodeAnalysis;
+
+namespace SemanticKernel.IntegrationTests.TestSettings;
+
+[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated",
+ Justification = "Configuration classes are instantiated through IConfiguration.")]
+internal sealed class OpenAIConfiguration(string serviceId, string modelId, string apiKey, string? chatModelId = null)
+{
+ public string ServiceId { get; set; } = serviceId;
+ public string ModelId { get; set; } = modelId;
+ public string? ChatModelId { get; set; } = chatModelId;
+ public string ApiKey { get; set; } = apiKey;
+}
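+
+// Illustrative "OpenAIEmbeddings" section of testsettings.json bound to this class
+// (values are placeholders):
+//
+// { "OpenAIEmbeddings": { "ServiceId": "openai-embeddings", "ModelId": "text-embedding-ada-002", "ApiKey": "..." } }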
From 6729af13a4909ac40ce9a0272b1cc2b67b8329e8 Mon Sep 17 00:00:00 2001
From: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com>
Date: Mon, 24 Jun 2024 02:28:03 -0700
Subject: [PATCH 007/226] .Net: Copy code related to
AzureChatCompletionService from Connectors.OpenAI to Connectors.AzureOpenAI
(#6906)
### Motivation and Context
As a first step in migrating AzureOpenAIConnector to Azure AI SDK v2,
all code related to AzureOpenAIChatCompletionService, including unit
tests, is copied from the Connectors.OpenAI project to the
Connectors.AzureOpenAI project as-is, with only the structural
modifications described below and no logical modifications. This is a
preparatory step before refactoring the AzureOpenAIChatCompletionService
to use Azure SDK v2.
### Description
This PR does the following:
1. Copies the AzureOpenAIChatCompletionService class and all its
dependencies to the Connectors.AzureOpenAI project as they are, with no
code changes.
2. Copies all existing unit tests related to the
AzureOpenAIChatCompletionService service and its dependencies to the
Connectors.AzureOpenAI.UnitTests project.
3. Renames some files in the Connectors.AzureOpenAI project so that
their names begin with AzureOpenAI instead of OpenAI.
4. Changes namespaces in the copied files from
Microsoft.SemanticKernel.Connectors.OpenAI to
Microsoft.SemanticKernel.Connectors.AzureOpenAI.
Related to the "Move reusable code from existing
Microsoft.SemanticKernel.Connectors.OpenAI project to the new project"
task of
the https://github.com/microsoft/semantic-kernel/issues/6864 issue.
### Contribution Checklist
- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone :smile:
---
.../.editorconfig | 6 +
...AzureOpenAIPromptExecutionSettingsTests.cs | 274 +++
.../AzureOpenAITestHelper.cs | 20 +
.../AzureToolCallBehaviorTests.cs | 248 +++
.../AzureOpenAIChatCompletionServiceTests.cs | 958 ++++++++++
.../ChatHistoryExtensionsTests.cs | 45 +
.../Connectors.AzureOpenAI.UnitTests.csproj | 6 +
.../AzureOpenAIChatMessageContentTests.cs | 124 ++
.../Core/AzureOpenAIFunctionToolCallTests.cs | 81 +
...reOpenAIPluginCollectionExtensionsTests.cs | 75 +
.../AzureOpenAIStreamingTextContentTests.cs | 41 +
.../RequestFailedExceptionExtensionsTests.cs | 77 +
.../AutoFunctionInvocationFilterTests.cs | 629 +++++++
.../AzureOpenAIFunctionTests.cs | 188 ++
.../KernelFunctionMetadataExtensionsTests.cs | 256 +++
.../MultipleHttpMessageHandlerStub.cs | 53 +
...multiple_function_calls_test_response.json | 64 +
...on_single_function_call_test_response.json | 32 +
..._multiple_function_calls_test_response.txt | 9 +
...ing_single_function_call_test_response.txt | 3 +
...hat_completion_streaming_test_response.txt | 5 +
.../chat_completion_test_response.json | 22 +
...tion_with_data_streaming_test_response.txt | 1 +
...at_completion_with_data_test_response.json | 28 +
...multiple_function_calls_test_response.json | 40 +
..._multiple_function_calls_test_response.txt | 5 +
...ext_completion_streaming_test_response.txt | 3 +
.../text_completion_test_response.json | 19 +
.../AddHeaderRequestPolicy.cs | 20 +
.../AzureOpenAIPromptExecutionSettings.cs | 432 +++++
.../AzureToolCallBehavior.cs | 269 +++
.../AzureOpenAIChatCompletionService.cs | 102 ++
.../ChatHistoryExtensions.cs | 70 +
.../Connectors.AzureOpenAI.csproj | 2 +-
.../Core/AzureOpenAIChatMessageContent.cs | 117 ++
.../Core/AzureOpenAIClientCore.cs | 102 ++
.../Core/AzureOpenAIFunction.cs | 178 ++
.../Core/AzureOpenAIFunctionToolCall.cs | 170 ++
...eOpenAIKernelFunctionMetadataExtensions.cs | 54 +
.../AzureOpenAIPluginCollectionExtensions.cs | 62 +
.../AzureOpenAIStreamingChatMessageContent.cs | 87 +
.../Core/AzureOpenAIStreamingTextContent.cs | 51 +
.../Connectors.AzureOpenAI/Core/ClientCore.cs | 1574 +++++++++++++++++
.../RequestFailedExceptionExtensions.cs | 38 +
44 files changed, 6639 insertions(+), 1 deletion(-)
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatHistoryExtensionsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIStreamingTextContentTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/MultipleHttpMessageHandlerStub.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/ChatHistoryExtensions.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIKernelFunctionMetadataExtensions.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingTextContent.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/RequestFailedExceptionExtensions.cs
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig
new file mode 100644
index 000000000000..394eef685f21
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig
@@ -0,0 +1,6 @@
+# Suppressing errors for Test projects under dotnet folder
+[*.cs]
+dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task
+dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave
+dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member
+dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs
new file mode 100644
index 000000000000..0cf1c4e2a9e3
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs
@@ -0,0 +1,274 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
+
+/// <summary>
+/// Unit tests of AzureOpenAIPromptExecutionSettingsTests
+/// </summary>
+public class AzureOpenAIPromptExecutionSettingsTests
+{
+ [Fact]
+ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
+ {
+ // Arrange
+ // Act
+ AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(null, 128);
+
+ // Assert
+ Assert.NotNull(executionSettings);
+ Assert.Equal(1, executionSettings.Temperature);
+ Assert.Equal(1, executionSettings.TopP);
+ Assert.Equal(0, executionSettings.FrequencyPenalty);
+ Assert.Equal(0, executionSettings.PresencePenalty);
+ Assert.Equal(1, executionSettings.ResultsPerPrompt);
+ Assert.Null(executionSettings.StopSequences);
+ Assert.Null(executionSettings.TokenSelectionBiases);
+ Assert.Null(executionSettings.TopLogprobs);
+ Assert.Null(executionSettings.Logprobs);
+ Assert.Null(executionSettings.AzureChatExtensionsOptions);
+ Assert.Equal(128, executionSettings.MaxTokens);
+ }
+
+ [Fact]
+ public void ItUsesExistingOpenAIExecutionSettings()
+ {
+ // Arrange
+ AzureOpenAIPromptExecutionSettings actualSettings = new()
+ {
+ Temperature = 0.7,
+ TopP = 0.7,
+ FrequencyPenalty = 0.7,
+ PresencePenalty = 0.7,
+ ResultsPerPrompt = 2,
+ StopSequences = new string[] { "foo", "bar" },
+ ChatSystemPrompt = "chat system prompt",
+ MaxTokens = 128,
+ Logprobs = true,
+ TopLogprobs = 5,
+ TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
+ };
+
+ // Act
+ AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings);
+
+ // Assert
+ Assert.NotNull(executionSettings);
+ Assert.Equal(actualSettings, executionSettings);
+ }
+
+ [Fact]
+ public void ItCanUseOpenAIExecutionSettings()
+ {
+ // Arrange
+ PromptExecutionSettings actualSettings = new()
+ {
+ ExtensionData = new Dictionary<string, object>() {
+ { "max_tokens", 1000 },
+ { "temperature", 0 }
+ }
+ };
+
+ // Act
+ AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null);
+
+ // Assert
+ Assert.NotNull(executionSettings);
+ Assert.Equal(1000, executionSettings.MaxTokens);
+ Assert.Equal(0, executionSettings.Temperature);
+ }
+
+ [Fact]
+ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
+ {
+ // Arrange
+ PromptExecutionSettings actualSettings = new()
+ {
+ ExtensionData = new Dictionary<string, object>()
+ {
+ { "temperature", 0.7 },
+ { "top_p", 0.7 },
+ { "frequency_penalty", 0.7 },
+ { "presence_penalty", 0.7 },
+ { "results_per_prompt", 2 },
+ { "stop_sequences", new [] { "foo", "bar" } },
+ { "chat_system_prompt", "chat system prompt" },
+ { "max_tokens", 128 },
+ { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } },
+ { "seed", 123456 },
+ { "logprobs", true },
+ { "top_logprobs", 5 },
+ }
+ };
+
+ // Act
+ AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null);
+
+ // Assert
+ AssertExecutionSettings(executionSettings);
+ }
+
+ [Fact]
+ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
+ {
+ // Arrange
+ PromptExecutionSettings actualSettings = new()
+ {
+ ExtensionData = new Dictionary<string, object>()
+ {
+ { "temperature", "0.7" },
+ { "top_p", "0.7" },
+ { "frequency_penalty", "0.7" },
+ { "presence_penalty", "0.7" },
+ { "results_per_prompt", "2" },
+ { "stop_sequences", new [] { "foo", "bar" } },
+ { "chat_system_prompt", "chat system prompt" },
+ { "max_tokens", "128" },
+ { "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } },
+ { "seed", 123456 },
+ { "logprobs", true },
+ { "top_logprobs", 5 }
+ }
+ };
+
+ // Act
+ AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null);
+
+ // Assert
+ AssertExecutionSettings(executionSettings);
+ }
+
+ [Fact]
+ public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
+ {
+ // Arrange
+ var json = """
+ {
+ "temperature": 0.7,
+ "top_p": 0.7,
+ "frequency_penalty": 0.7,
+ "presence_penalty": 0.7,
+ "results_per_prompt": 2,
+ "stop_sequences": [ "foo", "bar" ],
+ "chat_system_prompt": "chat system prompt",
+ "token_selection_biases": { "1": 2, "3": 4 },
+ "max_tokens": 128,
+ "seed": 123456,
+ "logprobs": true,
+ "top_logprobs": 5
+ }
+ """;
+ var actualSettings = JsonSerializer.Deserialize<AzureOpenAIPromptExecutionSettings>(json);
+
+ // Act
+ AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings);
+
+ // Assert
+ AssertExecutionSettings(executionSettings);
+ }
+
+ [Theory]
+ [InlineData("", "")]
+ [InlineData("System prompt", "System prompt")]
+ public void ItUsesCorrectChatSystemPrompt(string chatSystemPrompt, string expectedChatSystemPrompt)
+ {
+ // Arrange & Act
+ var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = chatSystemPrompt };
+
+ // Assert
+ Assert.Equal(expectedChatSystemPrompt, settings.ChatSystemPrompt);
+ }
+
+ [Fact]
+ public void PromptExecutionSettingsCloneWorksAsExpected()
+ {
+ // Arrange
+ string configPayload = """
+ {
+ "max_tokens": 60,
+ "temperature": 0.5,
+ "top_p": 0.0,
+ "presence_penalty": 0.0,
+ "frequency_penalty": 0.0
+ }
+ """;
+ var executionSettings = JsonSerializer.Deserialize<AzureOpenAIPromptExecutionSettings>(configPayload);
+
+ // Act
+ var clone = executionSettings!.Clone();
+
+ // Assert
+ Assert.NotNull(clone);
+ Assert.Equal(executionSettings.ModelId, clone.ModelId);
+ Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData);
+ }
+
+ [Fact]
+ public void PromptExecutionSettingsFreezeWorksAsExpected()
+ {
+ // Arrange
+ string configPayload = """
+ {
+ "max_tokens": 60,
+ "temperature": 0.5,
+ "top_p": 0.0,
+ "presence_penalty": 0.0,
+ "frequency_penalty": 0.0,
+ "stop_sequences": [ "DONE" ],
+ "token_selection_biases": { "1": 2, "3": 4 }
+ }
+ """;
+ var executionSettings = JsonSerializer.Deserialize<AzureOpenAIPromptExecutionSettings>(configPayload);
+
+ // Act
+ executionSettings!.Freeze();
+
+ // Assert
+ Assert.True(executionSettings.IsFrozen);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.ModelId = "gpt-4");
+ Assert.Throws<InvalidOperationException>(() => executionSettings.ResultsPerPrompt = 2);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Temperature = 1);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.TopP = 1);
+ Assert.Throws<NotSupportedException>(() => executionSettings.StopSequences?.Add("STOP"));
+ Assert.Throws<NotSupportedException>(() => executionSettings.TokenSelectionBiases?.Add(5, 6));
+
+ executionSettings!.Freeze(); // idempotent
+ Assert.True(executionSettings.IsFrozen);
+ }
+
+ [Fact]
+ public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences()
+ {
+ // Arrange
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { StopSequences = [] };
+
+ // Act
+#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions
+ var executionSettingsWithData = AzureOpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings);
+#pragma warning restore CS0618
+ // Assert
+ Assert.Null(executionSettingsWithData.StopSequences);
+ }
+
+ private static void AssertExecutionSettings(AzureOpenAIPromptExecutionSettings executionSettings)
+ {
+ Assert.NotNull(executionSettings);
+ Assert.Equal(0.7, executionSettings.Temperature);
+ Assert.Equal(0.7, executionSettings.TopP);
+ Assert.Equal(0.7, executionSettings.FrequencyPenalty);
+ Assert.Equal(0.7, executionSettings.PresencePenalty);
+ Assert.Equal(2, executionSettings.ResultsPerPrompt);
+ Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
+ Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
+ Assert.Equal(new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
+ Assert.Equal(128, executionSettings.MaxTokens);
+ Assert.Equal(123456, executionSettings.Seed);
+ Assert.Equal(true, executionSettings.Logprobs);
+ Assert.Equal(5, executionSettings.TopLogprobs);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
new file mode 100644
index 000000000000..9df4aae40c2d
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
@@ -0,0 +1,20 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.IO;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
+
+/// <summary>
+/// Helper for AzureOpenAI test purposes.
+/// </summary>
+internal static class AzureOpenAITestHelper
+{
+ /// <summary>
+ /// Reads test response from file for mocking purposes.
+ /// </summary>
+ /// <param name="fileName">Name of the file with test response.</param>
+ internal static string GetTestResponse(string fileName)
+ {
+ return File.ReadAllText($"./TestData/{fileName}");
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs
new file mode 100644
index 000000000000..525dabcd26d2
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs
@@ -0,0 +1,248 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Linq;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using static Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureToolCallBehavior;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
+
+/// <summary>
+/// Unit tests for <see cref="AzureToolCallBehavior"/>
+/// </summary>
+public sealed class AzureToolCallBehaviorTests
+{
+ [Fact]
+ public void EnableKernelFunctionsReturnsCorrectKernelFunctionsInstance()
+ {
+ // Arrange & Act
+ var behavior = AzureToolCallBehavior.EnableKernelFunctions;
+
+ // Assert
+ Assert.IsType<KernelFunctions>(behavior);
+ Assert.Equal(0, behavior.MaximumAutoInvokeAttempts);
+ }
+
+ [Fact]
+ public void AutoInvokeKernelFunctionsReturnsCorrectKernelFunctionsInstance()
+ {
+ // Arrange & Act
+ const int DefaultMaximumAutoInvokeAttempts = 128;
+ var behavior = AzureToolCallBehavior.AutoInvokeKernelFunctions;
+
+ // Assert
+ Assert.IsType<KernelFunctions>(behavior);
+ Assert.Equal(DefaultMaximumAutoInvokeAttempts, behavior.MaximumAutoInvokeAttempts);
+ }
+
+ [Fact]
+ public void EnableFunctionsReturnsEnabledFunctionsInstance()
+ {
+ // Arrange & Act
+ List<AzureOpenAIFunction> functions = [new("Plugin", "Function", "description", [], null)];
+ var behavior = AzureToolCallBehavior.EnableFunctions(functions);
+
+ // Assert
+ Assert.IsType<EnabledFunctions>(behavior);
+ }
+
+ [Fact]
+ public void RequireFunctionReturnsRequiredFunctionInstance()
+ {
+ // Arrange & Act
+ var behavior = AzureToolCallBehavior.RequireFunction(new("Plugin", "Function", "description", [], null));
+
+ // Assert
+ Assert.IsType<RequiredFunction>(behavior);
+ }
+
+ [Fact]
+ public void KernelFunctionsConfigureOptionsWithNullKernelDoesNotAddTools()
+ {
+ // Arrange
+ var kernelFunctions = new KernelFunctions(autoInvoke: false);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+
+ // Act
+ kernelFunctions.ConfigureOptions(null, chatCompletionsOptions);
+
+ // Assert
+ Assert.Empty(chatCompletionsOptions.Tools);
+ }
+
+ [Fact]
+ public void KernelFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools()
+ {
+ // Arrange
+ var kernelFunctions = new KernelFunctions(autoInvoke: false);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+ var kernel = Kernel.CreateBuilder().Build();
+
+ // Act
+ kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions);
+
+ // Assert
+ Assert.Null(chatCompletionsOptions.ToolChoice);
+ Assert.Empty(chatCompletionsOptions.Tools);
+ }
+
+ [Fact]
+ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools()
+ {
+ // Arrange
+ var kernelFunctions = new KernelFunctions(autoInvoke: false);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+ var kernel = Kernel.CreateBuilder().Build();
+
+ var plugin = this.GetTestPlugin();
+
+ kernel.Plugins.Add(plugin);
+
+ // Act
+ kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions);
+
+ // Assert
+ Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice);
+
+ this.AssertTools(chatCompletionsOptions);
+ }
+
+ [Fact]
+ public void EnabledFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools()
+ {
+ // Arrange
+ var enabledFunctions = new EnabledFunctions([], autoInvoke: false);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+
+ // Act
+ enabledFunctions.ConfigureOptions(null, chatCompletionsOptions);
+
+ // Assert
+ Assert.Null(chatCompletionsOptions.ToolChoice);
+ Assert.Empty(chatCompletionsOptions.Tools);
+ }
+
+ [Fact]
+ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsException()
+ {
+ // Arrange
+ var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction());
+ var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+
+ // Act & Assert
+ var exception = Assert.Throws<KernelException>(() => enabledFunctions.ConfigureOptions(null, chatCompletionsOptions));
+ Assert.Equal($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.", exception.Message);
+ }
+
+ [Fact]
+ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsException()
+ {
+ // Arrange
+ var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction());
+ var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+ var kernel = Kernel.CreateBuilder().Build();
+
+ // Act & Assert
+ var exception = Assert.Throws<KernelException>(() => enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions));
+ Assert.Equal($"The specified {nameof(EnabledFunctions)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message);
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public void EnabledFunctionsConfigureOptionsWithKernelAndPluginsAddsTools(bool autoInvoke)
+ {
+ // Arrange
+ var plugin = this.GetTestPlugin();
+ var functions = plugin.GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction());
+ var enabledFunctions = new EnabledFunctions(functions, autoInvoke);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+ var kernel = Kernel.CreateBuilder().Build();
+
+ kernel.Plugins.Add(plugin);
+
+ // Act
+ enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions);
+
+ // Assert
+ Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice);
+
+ this.AssertTools(chatCompletionsOptions);
+ }
+
+ [Fact]
+ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsException()
+ {
+ // Arrange
+ var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction()).First();
+ var requiredFunction = new RequiredFunction(function, autoInvoke: true);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+
+ // Act & Assert
+ var exception = Assert.Throws<KernelException>(() => requiredFunction.ConfigureOptions(null, chatCompletionsOptions));
+ Assert.Equal($"Auto-invocation with {nameof(RequiredFunction)} is not supported when no kernel is provided.", exception.Message);
+ }
+
+ [Fact]
+ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsException()
+ {
+ // Arrange
+ var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction()).First();
+ var requiredFunction = new RequiredFunction(function, autoInvoke: true);
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+ var kernel = Kernel.CreateBuilder().Build();
+
+ // Act & Assert
+ var exception = Assert.Throws<KernelException>(() => requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions));
+ Assert.Equal($"The specified {nameof(RequiredFunction)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message);
+ }
+
+ [Fact]
+ public void RequiredFunctionConfigureOptionsAddsTools()
+ {
+ // Arrange
+ var plugin = this.GetTestPlugin();
+ var function = plugin.GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
+ var chatCompletionsOptions = new ChatCompletionsOptions();
+ var requiredFunction = new RequiredFunction(function, autoInvoke: true);
+ var kernel = new Kernel();
+ kernel.Plugins.Add(plugin);
+
+ // Act
+ requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions);
+
+ // Assert
+ Assert.NotNull(chatCompletionsOptions.ToolChoice);
+
+ this.AssertTools(chatCompletionsOptions);
+ }
+
+ private KernelPlugin GetTestPlugin()
+ {
+ var function = KernelFunctionFactory.CreateFromMethod(
+ (string parameter1, string parameter2) => "Result1",
+ "MyFunction",
+ "Test Function",
+ [new KernelParameterMetadata("parameter1"), new KernelParameterMetadata("parameter2")],
+ new KernelReturnParameterMetadata { ParameterType = typeof(string), Description = "Function Result" });
+
+ return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
+ }
+
+ private void AssertTools(ChatCompletionsOptions chatCompletionsOptions)
+ {
+ Assert.Single(chatCompletionsOptions.Tools);
+
+ var tool = chatCompletionsOptions.Tools[0] as ChatCompletionsFunctionToolDefinition;
+
+ Assert.NotNull(tool);
+
+ Assert.Equal("MyPlugin-MyFunction", tool.Name);
+ Assert.Equal("Test Function", tool.Description);
+ Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.Parameters.ToString());
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
new file mode 100644
index 000000000000..69c314bdcb46
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
@@ -0,0 +1,958 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Moq;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.ChatCompletion;
+
+/// <summary>
+/// Unit tests for <see cref="AzureOpenAIChatCompletionService"/>
+/// </summary>
+public sealed class AzureOpenAIChatCompletionServiceTests : IDisposable
+{
+ private readonly MultipleHttpMessageHandlerStub _messageHandlerStub;
+ private readonly HttpClient _httpClient;
+ private readonly Mock<ILoggerFactory> _mockLoggerFactory;
+
+ public AzureOpenAIChatCompletionServiceTests()
+ {
+ this._messageHandlerStub = new MultipleHttpMessageHandlerStub();
+ this._httpClient = new HttpClient(this._messageHandlerStub, false);
+ this._mockLoggerFactory = new Mock<ILoggerFactory>();
+
+ var mockLogger = new Mock<ILogger>();
+
+ mockLogger.Setup(l => l.IsEnabled(It.IsAny<LogLevel>())).Returns(true);
+
+ this._mockLoggerFactory.Setup(l => l.CreateLogger(It.IsAny<string>())).Returns(mockLogger.Object);
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory)
+ {
+ // Arrange & Act
+ var service = includeLoggerFactory ?
+ new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) :
+ new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id");
+
+ // Assert
+ Assert.NotNull(service);
+ Assert.Equal("model-id", service.Attributes["ModelId"]);
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory)
+ {
+ // Arrange & Act
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var service = includeLoggerFactory ?
+ new AzureOpenAIChatCompletionService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) :
+ new AzureOpenAIChatCompletionService("deployment", "https://endpoint", credentials, "model-id");
+
+ // Assert
+ Assert.NotNull(service);
+ Assert.Equal("model-id", service.Attributes["ModelId"]);
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory)
+ {
+ // Arrange & Act
+ var client = new OpenAIClient("key");
+ var service = includeLoggerFactory ?
+ new AzureOpenAIChatCompletionService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) :
+ new AzureOpenAIChatCompletionService("deployment", client, "model-id");
+
+ // Assert
+ Assert.NotNull(service);
+ Assert.Equal("model-id", service.Attributes["ModelId"]);
+ }
+
+ [Fact]
+ public async Task GetTextContentsWorksCorrectlyAsync()
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ // Act
+ var result = await service.GetTextContentsAsync("Prompt");
+
+ // Assert
+ Assert.True(result.Count > 0);
+ Assert.Equal("Test chat response", result[0].Text);
+
+ var usage = result[0].Metadata?["Usage"] as CompletionsUsage;
+
+ Assert.NotNull(usage);
+ Assert.Equal(55, usage.PromptTokens);
+ Assert.Equal(100, usage.CompletionTokens);
+ Assert.Equal(155, usage.TotalTokens);
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsWithEmptyChoicesThrowsExceptionAsync()
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent("{\"id\":\"response-id\",\"object\":\"chat.completion\",\"created\":1704208954,\"model\":\"gpt-4\",\"choices\":[],\"usage\":{\"prompt_tokens\":55,\"completion_tokens\":100,\"total_tokens\":155},\"system_fingerprint\":null}")
+ });
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KernelException>(() => service.GetChatMessageContentsAsync([]));
+
+ Assert.Equal("Chat completions not found", exception.Message);
+ }
+
+ [Theory]
+ [InlineData(0)]
+ [InlineData(129)]
+ public async Task GetChatMessageContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt)
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var settings = new AzureOpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt };
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<ArgumentOutOfRangeException>(() => service.GetChatMessageContentsAsync([], settings));
+
+ Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var settings = new AzureOpenAIPromptExecutionSettings()
+ {
+ MaxTokens = 123,
+ Temperature = 0.6,
+ TopP = 0.5,
+ FrequencyPenalty = 1.6,
+ PresencePenalty = 1.2,
+ ResultsPerPrompt = 5,
+ Seed = 567,
+ TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
+ StopSequences = ["stop_sequence"],
+ Logprobs = true,
+ TopLogprobs = 5,
+ AzureChatExtensionsOptions = new AzureChatExtensionsOptions
+ {
+ Extensions =
+ {
+ new AzureSearchChatExtensionConfiguration
+ {
+ SearchEndpoint = new Uri("http://test-search-endpoint"),
+ IndexName = "test-index-name"
+ }
+ }
+ }
+ };
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("User Message");
+ chatHistory.AddUserMessage([new ImageContent(new Uri("https://image")), new TextContent("User Message")]);
+ chatHistory.AddSystemMessage("System Message");
+ chatHistory.AddAssistantMessage("Assistant Message");
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync(chatHistory, settings);
+
+ // Assert
+ var requestContent = this._messageHandlerStub.RequestContents[0];
+
+ Assert.NotNull(requestContent);
+
+ var content = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContent));
+
+ var messages = content.GetProperty("messages");
+
+ var userMessage = messages[0];
+ var userMessageCollection = messages[1];
+ var systemMessage = messages[2];
+ var assistantMessage = messages[3];
+
+ Assert.Equal("user", userMessage.GetProperty("role").GetString());
+ Assert.Equal("User Message", userMessage.GetProperty("content").GetString());
+
+ Assert.Equal("user", userMessageCollection.GetProperty("role").GetString());
+ var contentItems = userMessageCollection.GetProperty("content");
+ Assert.Equal(2, contentItems.GetArrayLength());
+ Assert.Equal("https://image/", contentItems[0].GetProperty("image_url").GetProperty("url").GetString());
+ Assert.Equal("image_url", contentItems[0].GetProperty("type").GetString());
+ Assert.Equal("User Message", contentItems[1].GetProperty("text").GetString());
+ Assert.Equal("text", contentItems[1].GetProperty("type").GetString());
+
+ Assert.Equal("system", systemMessage.GetProperty("role").GetString());
+ Assert.Equal("System Message", systemMessage.GetProperty("content").GetString());
+
+ Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString());
+ Assert.Equal("Assistant Message", assistantMessage.GetProperty("content").GetString());
+
+ Assert.Equal(123, content.GetProperty("max_tokens").GetInt32());
+ Assert.Equal(0.6, content.GetProperty("temperature").GetDouble());
+ Assert.Equal(0.5, content.GetProperty("top_p").GetDouble());
+ Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble());
+ Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble());
+ Assert.Equal(5, content.GetProperty("n").GetInt32());
+ Assert.Equal(567, content.GetProperty("seed").GetInt32());
+ Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32());
+ Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString());
+ Assert.True(content.GetProperty("logprobs").GetBoolean());
+ Assert.Equal(5, content.GetProperty("top_logprobs").GetInt32());
+
+ var dataSources = content.GetProperty("data_sources");
+ Assert.Equal(1, dataSources.GetArrayLength());
+ Assert.Equal("azure_search", dataSources[0].GetProperty("type").GetString());
+
+ var dataSourceParameters = dataSources[0].GetProperty("parameters");
+ Assert.Equal("http://test-search-endpoint/", dataSourceParameters.GetProperty("endpoint").GetString());
+ Assert.Equal("test-index-name", dataSourceParameters.GetProperty("index_name").GetString());
+ }
+
+ [Theory]
+ [MemberData(nameof(ResponseFormats))]
+ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(object responseFormat, string? expectedResponseType)
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var settings = new AzureOpenAIPromptExecutionSettings
+ {
+ ResponseFormat = responseFormat
+ };
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync([], settings);
+
+ // Assert
+ var requestContent = this._messageHandlerStub.RequestContents[0];
+
+ Assert.NotNull(requestContent);
+
+ var content = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContent));
+
+ Assert.Equal(expectedResponseType, content.GetProperty("response_format").GetProperty("type").GetString());
+ }
+
+ [Theory]
+ [MemberData(nameof(ToolCallBehaviors))]
+ public async Task GetChatMessageContentsWorksCorrectlyAsync(AzureToolCallBehavior behavior)
+ {
+ // Arrange
+ var kernel = Kernel.CreateBuilder().Build();
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = behavior };
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+
+ // Assert
+ Assert.True(result.Count > 0);
+ Assert.Equal("Test chat response", result[0].Content);
+
+ var usage = result[0].Metadata?["Usage"] as CompletionsUsage;
+
+ Assert.NotNull(usage);
+ Assert.Equal(55, usage.PromptTokens);
+ Assert.Equal(100, usage.CompletionTokens);
+ Assert.Equal(155, usage.TotalTokens);
+
+ Assert.Equal("stop", result[0].Metadata?["FinishReason"]);
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsWithFunctionCallAsync()
+ {
+ // Arrange
+ int functionCallCount = 0;
+
+ var kernel = Kernel.CreateBuilder().Build();
+ var function1 = KernelFunctionFactory.CreateFromMethod((string location) =>
+ {
+ functionCallCount++;
+ return "Some weather";
+ }, "GetCurrentWeather");
+
+ var function2 = KernelFunctionFactory.CreateFromMethod((string argument) =>
+ {
+ functionCallCount++;
+ throw new ArgumentException("Some exception");
+ }, "FunctionWithException");
+
+ kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]));
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+
+ // Assert
+ Assert.True(result.Count > 0);
+ Assert.Equal("Test chat response", result[0].Content);
+
+ Assert.Equal(2, functionCallCount);
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttemptsAsync()
+ {
+ // Arrange
+ const int DefaultMaximumAutoInvokeAttempts = 128;
+ const int ModelResponsesCount = 129;
+
+ int functionCallCount = 0;
+
+ var kernel = Kernel.CreateBuilder().Build();
+ var function = KernelFunctionFactory.CreateFromMethod((string location) =>
+ {
+ functionCallCount++;
+ return "Some weather";
+ }, "GetCurrentWeather");
+
+ kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]));
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ var responses = new List<HttpResponseMessage>();
+
+ for (var i = 0; i < ModelResponsesCount; i++)
+ {
+ responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) });
+ }
+
+ this._messageHandlerStub.ResponsesToReturn = responses;
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+
+ // Assert
+ Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount);
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsWithRequiredFunctionCallAsync()
+ {
+ // Arrange
+ int functionCallCount = 0;
+
+ var kernel = Kernel.CreateBuilder().Build();
+ var function = KernelFunctionFactory.CreateFromMethod((string location) =>
+ {
+ functionCallCount++;
+ return "Some weather";
+ }, "GetCurrentWeather");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
+ var openAIFunction = plugin.GetFunctionsMetadata().First().ToAzureOpenAIFunction();
+
+ kernel.Plugins.Add(plugin);
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+
+ // Assert
+ Assert.Equal(1, functionCallCount);
+
+ var requestContents = this._messageHandlerStub.RequestContents;
+
+ Assert.Equal(2, requestContents.Count);
+
+ requestContents.ForEach(Assert.NotNull);
+
+ var firstContent = Encoding.UTF8.GetString(requestContents[0]!);
+ var secondContent = Encoding.UTF8.GetString(requestContents[1]!);
+
+ var firstContentJson = JsonSerializer.Deserialize<JsonElement>(firstContent);
+ var secondContentJson = JsonSerializer.Deserialize<JsonElement>(secondContent);
+
+ Assert.Equal(1, firstContentJson.GetProperty("tools").GetArrayLength());
+ Assert.Equal("MyPlugin-GetCurrentWeather", firstContentJson.GetProperty("tool_choice").GetProperty("function").GetProperty("name").GetString());
+
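+ // After the required function has been auto-invoked, the connector resets tool_choice to "none" so the model can reply normally.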
+ Assert.Equal("none", secondContentJson.GetProperty("tool_choice").GetString());
+ }
+
+ [Fact]
+ public async Task GetStreamingTextContentsWorksCorrectlyAsync()
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StreamContent(stream)
+ });
+
+ // Act & Assert
+ var enumerator = service.GetStreamingTextContentsAsync("Prompt").GetAsyncEnumerator();
+
+ await enumerator.MoveNextAsync();
+ Assert.Equal("Test chat streaming response", enumerator.Current.Text);
+
+ await enumerator.MoveNextAsync();
+ Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ }
+
+ [Fact]
+ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync()
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StreamContent(stream)
+ });
+
+ // Act & Assert
+ var enumerator = service.GetStreamingChatMessageContentsAsync([]).GetAsyncEnumerator();
+
+ await enumerator.MoveNextAsync();
+ Assert.Equal("Test chat streaming response", enumerator.Current.Content);
+
+ await enumerator.MoveNextAsync();
+ Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ }
+
+ [Fact]
+ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync()
+ {
+ // Arrange
+ int functionCallCount = 0;
+
+ var kernel = Kernel.CreateBuilder().Build();
+ var function1 = KernelFunctionFactory.CreateFromMethod((string location) =>
+ {
+ functionCallCount++;
+ return "Some weather";
+ }, "GetCurrentWeather");
+
+ var function2 = KernelFunctionFactory.CreateFromMethod((string argument) =>
+ {
+ functionCallCount++;
+ throw new ArgumentException("Some exception");
+ }, "FunctionWithException");
+
+ kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]));
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_multiple_function_calls_test_response.txt")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ // Act & Assert
+ var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator();
+
+ await enumerator.MoveNextAsync();
+ Assert.Equal("Test chat streaming response", enumerator.Current.Content);
+ Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+
+ await enumerator.MoveNextAsync();
+ Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+
+ // Keep looping until the end of stream
+ while (await enumerator.MoveNextAsync())
+ {
+ }
+
+ Assert.Equal(2, functionCallCount);
+ }
+
+ [Fact]
+ public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvokeAttemptsAsync()
+ {
+ // Arrange
+ const int DefaultMaximumAutoInvokeAttempts = 128;
+ const int ModelResponsesCount = 129;
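+ // As in the non-streaming test above, one extra response proves auto-invocation stops at the 128-attempt default.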
+
+ int functionCallCount = 0;
+
+ var kernel = Kernel.CreateBuilder().Build();
+ var function = KernelFunctionFactory.CreateFromMethod((string location) =>
+ {
+ functionCallCount++;
+ return "Some weather";
+ }, "GetCurrentWeather");
+
+ kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]));
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ var responses = new List<HttpResponseMessage>();
+
+ for (var i = 0; i < ModelResponsesCount; i++)
+ {
+ responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) });
+ }
+
+ this._messageHandlerStub.ResponsesToReturn = responses;
+
+ // Act & Assert
+ await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel))
+ {
+ Assert.Equal("Test chat streaming response", chunk.Content);
+ }
+
+ Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount);
+ }
+
+ [Fact]
+ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
+ {
+ // Arrange
+ int functionCallCount = 0;
+
+ var kernel = Kernel.CreateBuilder().Build();
+ var function = KernelFunctionFactory.CreateFromMethod((string location) =>
+ {
+ functionCallCount++;
+ return "Some weather";
+ }, "GetCurrentWeather");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
+ var openAIFunction = plugin.GetFunctionsMetadata().First().ToAzureOpenAIFunction();
+
+ kernel.Plugins.Add(plugin);
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ // Act & Assert
+ var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator();
+
+ // Function Tool Call Streaming (One Chunk)
+ await enumerator.MoveNextAsync();
+ Assert.Equal("Test chat streaming response", enumerator.Current.Content);
+ Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+
+ // Chat Completion Streaming (1st Chunk)
+ await enumerator.MoveNextAsync();
+ Assert.Null(enumerator.Current.Metadata?["FinishReason"]);
+
+ // Chat Completion Streaming (2nd Chunk)
+ await enumerator.MoveNextAsync();
+ Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+
+ Assert.Equal(1, functionCallCount);
+
+ var requestContents = this._messageHandlerStub.RequestContents;
+
+ Assert.Equal(2, requestContents.Count);
+
+ requestContents.ForEach(Assert.NotNull);
+
+ var firstContent = Encoding.UTF8.GetString(requestContents[0]!);
+ var secondContent = Encoding.UTF8.GetString(requestContents[1]!);
+
+ var firstContentJson = JsonSerializer.Deserialize<JsonElement>(firstContent);
+ var secondContentJson = JsonSerializer.Deserialize<JsonElement>(secondContent);
+
+ Assert.Equal(1, firstContentJson.GetProperty("tools").GetArrayLength());
+ Assert.Equal("MyPlugin-GetCurrentWeather", firstContentJson.GetProperty("tool_choice").GetProperty("function").GetProperty("name").GetString());
+
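+ // As in the non-streaming variant, tool_choice drops to "none" once the required function has run.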
+ Assert.Equal("none", secondContentJson.GetProperty("tool_choice").GetString());
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
+ {
+ // Arrange
+ const string Prompt = "This is test prompt";
+ const string SystemMessage = "This is test system message";
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage };
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ IKernelBuilder builder = Kernel.CreateBuilder();
+ builder.Services.AddTransient<IChatCompletionService>((sp) => service);
+ Kernel kernel = builder.Build();
+
+ // Act
+ var result = await kernel.InvokePromptAsync(Prompt, new(settings));
+
+ // Assert
+ Assert.Equal("Test chat response", result.ToString());
+
+ var requestContentByteArray = this._messageHandlerStub.RequestContents[0];
+
+ Assert.NotNull(requestContentByteArray);
+
+ var requestContent = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContentByteArray));
+
+ var messages = requestContent.GetProperty("messages");
+
+ Assert.Equal(2, messages.GetArrayLength());
+
+ Assert.Equal(SystemMessage, messages[0].GetProperty("content").GetString());
+ Assert.Equal("system", messages[0].GetProperty("role").GetString());
+
+ Assert.Equal(Prompt, messages[1].GetProperty("content").GetString());
+ Assert.Equal("user", messages[1].GetProperty("role").GetString());
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync()
+ {
+ // Arrange
+ const string Prompt = "This is test prompt";
+ const string SystemMessage = "This is test system message";
+ const string AssistantMessage = "This is assistant message";
+ const string CollectionItemPrompt = "This is collection item prompt";
+
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage };
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage(Prompt);
+ chatHistory.AddAssistantMessage(AssistantMessage);
+ chatHistory.AddUserMessage(
+ [
+ new TextContent(CollectionItemPrompt),
+ new ImageContent(new Uri("https://image"))
+ ]);
+
+ // Act
+ var result = await service.GetChatMessageContentsAsync(chatHistory, settings);
+
+ // Assert
+ Assert.True(result.Count > 0);
+ Assert.Equal("Test chat response", result[0].Content);
+
+ var requestContentByteArray = this._messageHandlerStub.RequestContents[0];
+
+ Assert.NotNull(requestContentByteArray);
+
+ var requestContent = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContentByteArray));
+
+ var messages = requestContent.GetProperty("messages");
+
+ Assert.Equal(4, messages.GetArrayLength());
+
+ Assert.Equal(SystemMessage, messages[0].GetProperty("content").GetString());
+ Assert.Equal("system", messages[0].GetProperty("role").GetString());
+
+ Assert.Equal(Prompt, messages[1].GetProperty("content").GetString());
+ Assert.Equal("user", messages[1].GetProperty("role").GetString());
+
+ Assert.Equal(AssistantMessage, messages[2].GetProperty("content").GetString());
+ Assert.Equal("assistant", messages[2].GetProperty("role").GetString());
+
+ var contentItems = messages[3].GetProperty("content");
+ Assert.Equal(2, contentItems.GetArrayLength());
+ Assert.Equal(CollectionItemPrompt, contentItems[0].GetProperty("text").GetString());
+ Assert.Equal("text", contentItems[0].GetProperty("type").GetString());
+ Assert.Equal("https://image/", contentItems[1].GetProperty("image_url").GetProperty("url").GetString());
+ Assert.Equal("image_url", contentItems[1].GetProperty("type").GetString());
+ }
+
+ [Fact]
+ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfTypeFunctionCallContentAsync()
+ {
+ // Arrange
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json"))
+ });
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Fake prompt");
+
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+
+ // Act
+ var result = await sut.GetChatMessageContentAsync(chatHistory, settings);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Equal(5, result.Items.Count);
+
+ var getCurrentWeatherFunctionCall = result.Items[0] as FunctionCallContent;
+ Assert.NotNull(getCurrentWeatherFunctionCall);
+ Assert.Equal("GetCurrentWeather", getCurrentWeatherFunctionCall.FunctionName);
+ Assert.Equal("MyPlugin", getCurrentWeatherFunctionCall.PluginName);
+ Assert.Equal("1", getCurrentWeatherFunctionCall.Id);
+ Assert.Equal("Boston, MA", getCurrentWeatherFunctionCall.Arguments?["location"]?.ToString());
+
+ var functionWithExceptionFunctionCall = result.Items[1] as FunctionCallContent;
+ Assert.NotNull(functionWithExceptionFunctionCall);
+ Assert.Equal("FunctionWithException", functionWithExceptionFunctionCall.FunctionName);
+ Assert.Equal("MyPlugin", functionWithExceptionFunctionCall.PluginName);
+ Assert.Equal("2", functionWithExceptionFunctionCall.Id);
+ Assert.Equal("value", functionWithExceptionFunctionCall.Arguments?["argument"]?.ToString());
+
+ var nonExistentFunctionCall = result.Items[2] as FunctionCallContent;
+ Assert.NotNull(nonExistentFunctionCall);
+ Assert.Equal("NonExistentFunction", nonExistentFunctionCall.FunctionName);
+ Assert.Equal("MyPlugin", nonExistentFunctionCall.PluginName);
+ Assert.Equal("3", nonExistentFunctionCall.Id);
+ Assert.Equal("value", nonExistentFunctionCall.Arguments?["argument"]?.ToString());
+
+ var invalidArgumentsFunctionCall = result.Items[3] as FunctionCallContent;
+ Assert.NotNull(invalidArgumentsFunctionCall);
+ Assert.Equal("InvalidArguments", invalidArgumentsFunctionCall.FunctionName);
+ Assert.Equal("MyPlugin", invalidArgumentsFunctionCall.PluginName);
+ Assert.Equal("4", invalidArgumentsFunctionCall.Id);
+ Assert.Null(invalidArgumentsFunctionCall.Arguments);
+ Assert.NotNull(invalidArgumentsFunctionCall.Exception);
+ Assert.Equal("Error: Function call arguments were invalid JSON.", invalidArgumentsFunctionCall.Exception.Message);
+ Assert.NotNull(invalidArgumentsFunctionCall.Exception.InnerException);
+
+ var intArgumentsFunctionCall = result.Items[4] as FunctionCallContent;
+ Assert.NotNull(intArgumentsFunctionCall);
+ Assert.Equal("IntArguments", intArgumentsFunctionCall.FunctionName);
+ Assert.Equal("MyPlugin", intArgumentsFunctionCall.PluginName);
+ Assert.Equal("5", intArgumentsFunctionCall.Id);
+ Assert.Equal("36", intArgumentsFunctionCall.Arguments?["age"]?.ToString());
+ }
+
+ [Fact]
+ public async Task FunctionCallsShouldBeReturnedToLLMAsync()
+ {
+ // Arrange
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ var items = new ChatMessageContentItemCollection
+ {
+ new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }),
+ new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" })
+ };
+
+ ChatHistory chatHistory =
+ [
+ new ChatMessageContent(AuthorRole.Assistant, items)
+ ];
+
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+
+ // Act
+ await sut.GetChatMessageContentAsync(chatHistory, settings);
+
+ // Assert
+ var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!);
+ Assert.NotNull(actualRequestContent);
+
+ var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
+
+ var messages = optionsJson.GetProperty("messages");
+ Assert.Equal(1, messages.GetArrayLength());
+
+ var assistantMessage = messages[0];
+ Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString());
+
+ Assert.Equal(2, assistantMessage.GetProperty("tool_calls").GetArrayLength());
+
+ var tool1 = assistantMessage.GetProperty("tool_calls")[0];
+ Assert.Equal("1", tool1.GetProperty("id").GetString());
+ Assert.Equal("function", tool1.GetProperty("type").GetString());
+
+ var function1 = tool1.GetProperty("function");
+ Assert.Equal("MyPlugin-GetCurrentWeather", function1.GetProperty("name").GetString());
+ Assert.Equal("{\"location\":\"Boston, MA\"}", function1.GetProperty("arguments").GetString());
+
+ var tool2 = assistantMessage.GetProperty("tool_calls")[1];
+ Assert.Equal("2", tool2.GetProperty("id").GetString());
+ Assert.Equal("function", tool2.GetProperty("type").GetString());
+
+ var function2 = tool2.GetProperty("function");
+ Assert.Equal("MyPlugin-GetWeatherForecast", function2.GetProperty("name").GetString());
+ Assert.Equal("{\"location\":\"Boston, MA\"}", function2.GetProperty("arguments").GetString());
+ }
+
+ [Fact]
+ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsync()
+ {
+ // Arrange
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.Tool,
+ [
+ new FunctionResultContent(new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), "rainy"),
+ ]),
+ new ChatMessageContent(AuthorRole.Tool,
+ [
+ new FunctionResultContent(new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }), "sunny")
+ ])
+ };
+
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+
+ // Act
+ await sut.GetChatMessageContentAsync(chatHistory, settings);
+
+ // Assert
+ var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!);
+ Assert.NotNull(actualRequestContent);
+
+ var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
+
+ var messages = optionsJson.GetProperty("messages");
+ Assert.Equal(2, messages.GetArrayLength());
+
+ var toolMessage1 = messages[0];
+ Assert.Equal("tool", toolMessage1.GetProperty("role").GetString());
+ Assert.Equal("rainy", toolMessage1.GetProperty("content").GetString());
+ Assert.Equal("1", toolMessage1.GetProperty("tool_call_id").GetString());
+
+ var toolMessage2 = messages[1];
+ Assert.Equal("tool", toolMessage2.GetProperty("role").GetString());
+ Assert.Equal("sunny", toolMessage2.GetProperty("content").GetString());
+ Assert.Equal("2", toolMessage2.GetProperty("tool_call_id").GetString());
+ }
+
+ [Fact]
+ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessageAsync()
+ {
+ // Arrange
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ });
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.Tool,
+ [
+ new FunctionResultContent(new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), "rainy"),
+ new FunctionResultContent(new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }), "sunny")
+ ])
+ };
+
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+
+ // Act
+ await sut.GetChatMessageContentAsync(chatHistory, settings);
+
+ // Assert
+ var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!);
+ Assert.NotNull(actualRequestContent);
+
+ var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
+
+ var messages = optionsJson.GetProperty("messages");
+ Assert.Equal(2, messages.GetArrayLength());
+
+ var toolMessage1 = messages[0];
+ Assert.Equal("tool", toolMessage1.GetProperty("role").GetString());
+ Assert.Equal("rainy", toolMessage1.GetProperty("content").GetString());
+ Assert.Equal("1", toolMessage1.GetProperty("tool_call_id").GetString());
+
+ var toolMessage2 = messages[1];
+ Assert.Equal("tool", toolMessage2.GetProperty("role").GetString());
+ Assert.Equal("sunny", toolMessage2.GetProperty("content").GetString());
+ Assert.Equal("2", toolMessage2.GetProperty("tool_call_id").GetString());
+ }
+
+ public void Dispose()
+ {
+ this._httpClient.Dispose();
+ this._messageHandlerStub.Dispose();
+ }
+
+ public static TheoryData<AzureToolCallBehavior> ToolCallBehaviors => new()
+ {
+ AzureToolCallBehavior.EnableKernelFunctions,
+ AzureToolCallBehavior.AutoInvokeKernelFunctions
+ };
+
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs
new file mode 100644
index 000000000000..304e62bc9aeb
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs
@@ -0,0 +1,124 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+/// <summary>
+/// Unit tests for <see cref="AzureOpenAIChatMessageContent"/> class.
+/// </summary>
+public sealed class AzureOpenAIChatMessageContentTests
+{
+ [Fact]
+ public void ConstructorsWorkCorrectly()
+ {
+ // Arrange
+ List<ChatCompletionsToolCall> toolCalls = [new FakeChatCompletionsToolCall("id")];
+
+ // Act
+ var content1 = new AzureOpenAIChatMessageContent(new ChatRole("user"), "content1", "model-id1", toolCalls) { AuthorName = "Fred" };
+ var content2 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls);
+
+ // Assert
+ this.AssertChatMessageContent(AuthorRole.User, "content1", "model-id1", toolCalls, content1, "Fred");
+ this.AssertChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls, content2);
+ }
+
+ [Fact]
+ public void GetOpenAIFunctionToolCallsReturnsCorrectList()
+ {
+ // Arrange
+ List<ChatCompletionsToolCall> toolCalls = [
+ new ChatCompletionsFunctionToolCall("id1", "name", string.Empty),
+ new ChatCompletionsFunctionToolCall("id2", "name", string.Empty),
+ new FakeChatCompletionsToolCall("id3"),
+ new FakeChatCompletionsToolCall("id4")];
+
+ var content1 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content", "model-id", toolCalls);
+ var content2 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content", "model-id", []);
+
+ // Act
+ var actualToolCalls1 = content1.GetOpenAIFunctionToolCalls();
+ var actualToolCalls2 = content2.GetOpenAIFunctionToolCalls();
+
+ // Assert
+ Assert.Equal(2, actualToolCalls1.Count);
+ Assert.Equal("id1", actualToolCalls1[0].Id);
+ Assert.Equal("id2", actualToolCalls1[1].Id);
+
+ Assert.Empty(actualToolCalls2);
+ }
+
+ [Theory]
+ [InlineData(false)]
+ [InlineData(true)]
+ public void MetadataIsInitializedCorrectly(bool readOnlyMetadata)
+ {
+ // Arrange
+ IReadOnlyDictionary<string, object?> metadata = readOnlyMetadata ?
+ new CustomReadOnlyDictionary<string, object?>(new Dictionary<string, object?> { { "key", "value" } }) :
+ new Dictionary<string, object?> { { "key", "value" } };
+
+ List<ChatCompletionsToolCall> toolCalls = [
+ new ChatCompletionsFunctionToolCall("id1", "name", string.Empty),
+ new ChatCompletionsFunctionToolCall("id2", "name", string.Empty),
+ new FakeChatCompletionsToolCall("id3"),
+ new FakeChatCompletionsToolCall("id4")];
+
+ // Act
+ var content1 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content1", "model-id1", [], metadata);
+ var content2 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls, metadata);
+
+ // Assert
+ Assert.NotNull(content1.Metadata);
+ Assert.Single(content1.Metadata);
+
+ Assert.NotNull(content2.Metadata);
+ Assert.Equal(2, content2.Metadata.Count);
+ Assert.Equal("value", content2.Metadata["key"]);
+
+ Assert.IsType<List<ChatCompletionsFunctionToolCall>>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]);
+
+ var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List<ChatCompletionsFunctionToolCall>;
+ Assert.NotNull(actualToolCalls);
+
+ Assert.Equal(2, actualToolCalls.Count);
+ Assert.Equal("id1", actualToolCalls[0].Id);
+ Assert.Equal("id2", actualToolCalls[1].Id);
+ }
+
+ private void AssertChatMessageContent(
+ AuthorRole expectedRole,
+ string expectedContent,
+ string expectedModelId,
+ IReadOnlyList<ChatCompletionsToolCall> expectedToolCalls,
+ AzureOpenAIChatMessageContent actualContent,
+ string? expectedName = null)
+ {
+ Assert.Equal(expectedRole, actualContent.Role);
+ Assert.Equal(expectedContent, actualContent.Content);
+ Assert.Equal(expectedName, actualContent.AuthorName);
+ Assert.Equal(expectedModelId, actualContent.ModelId);
+ Assert.Same(expectedToolCalls, actualContent.ToolCalls);
+ }
+
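+ // A tool call of a type the connector does not recognize; used to verify that non-function tool calls are filtered out.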
+ private sealed class FakeChatCompletionsToolCall(string id) : ChatCompletionsToolCall(id)
+ { }
+
+ private sealed class CustomReadOnlyDictionary<TKey, TValue>(IDictionary<TKey, TValue> dictionary) : IReadOnlyDictionary<TKey, TValue> // explicitly not implementing IDictionary<>
+ {
+ public TValue this[TKey key] => dictionary[key];
+ public IEnumerable<TKey> Keys => dictionary.Keys;
+ public IEnumerable<TValue> Values => dictionary.Values;
+ public int Count => dictionary.Count;
+ public bool ContainsKey(TKey key) => dictionary.ContainsKey(key);
+ public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator() => dictionary.GetEnumerator();
+ public bool TryGetValue(TKey key, [MaybeNullWhen(false)] out TValue value) => dictionary.TryGetValue(key, out value);
+ IEnumerator IEnumerable.GetEnumerator() => dictionary.GetEnumerator();
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs
new file mode 100644
index 000000000000..8f16c6ea7db2
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs
@@ -0,0 +1,81 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+/// <summary>
+/// Unit tests for <see cref="AzureOpenAIFunctionToolCall"/> class.
+/// </summary>
+public sealed class AzureOpenAIFunctionToolCallTests
+{
+ [Theory]
+ [InlineData("MyFunction", "MyFunction")]
+ [InlineData("MyPlugin_MyFunction", "MyPlugin_MyFunction")]
+ public void FullyQualifiedNameReturnsValidName(string toolCallName, string expectedName)
+ {
+ // Arrange
+ var toolCall = new ChatCompletionsFunctionToolCall("id", toolCallName, string.Empty);
+ var openAIFunctionToolCall = new AzureOpenAIFunctionToolCall(toolCall);
+
+ // Act & Assert
+ Assert.Equal(expectedName, openAIFunctionToolCall.FullyQualifiedName);
+ Assert.Same(openAIFunctionToolCall.FullyQualifiedName, openAIFunctionToolCall.FullyQualifiedName);
+ }
+
+ [Fact]
+ public void ToStringReturnsCorrectValue()
+ {
+ // Arrange
+ var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}");
+ var openAIFunctionToolCall = new AzureOpenAIFunctionToolCall(toolCall);
+
+ // Act & Assert
+ Assert.Equal("MyPlugin_MyFunction(location:San Diego, max_price:300)", openAIFunctionToolCall.ToString());
+ }
+
+ [Fact]
+ public void ConvertToolCallUpdatesWithEmptyIndexesReturnsEmptyToolCalls()
+ {
+ // Arrange
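+ // The dictionaries are keyed by the tool call's index within the streaming response.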
+ var toolCallIdsByIndex = new Dictionary<int, string>();
+ var functionNamesByIndex = new Dictionary<int, string>();
+ var functionArgumentBuildersByIndex = new Dictionary<int, StringBuilder>();
+
+ // Act
+ var toolCalls = AzureOpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(
+ ref toolCallIdsByIndex,
+ ref functionNamesByIndex,
+ ref functionArgumentBuildersByIndex);
+
+ // Assert
+ Assert.Empty(toolCalls);
+ }
+
+ [Fact]
+ public void ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls()
+ {
+ // Arrange
+ var toolCallIdsByIndex = new Dictionary<int, string> { { 3, "test-id" } };
+ var functionNamesByIndex = new Dictionary<int, string> { { 3, "test-function" } };
+ var functionArgumentBuildersByIndex = new Dictionary<int, StringBuilder> { { 3, new("test-argument") } };
+
+ // Act
+ var toolCalls = AzureOpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(
+ ref toolCallIdsByIndex,
+ ref functionNamesByIndex,
+ ref functionArgumentBuildersByIndex);
+
+ // Assert
+ Assert.Single(toolCalls);
+
+ var toolCall = toolCalls[0];
+
+ Assert.Equal("test-id", toolCall.Id);
+ Assert.Equal("test-function", toolCall.Name);
+ Assert.Equal("test-argument", toolCall.Arguments);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs
new file mode 100644
index 000000000000..bbfb636196d3
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+/// <summary>
+/// Unit tests for <see cref="AzureOpenAIPluginCollectionExtensions"/> class.
+/// </summary>
+public sealed class AzureOpenAIPluginCollectionExtensionsTests
+{
+ [Fact]
+ public void TryGetFunctionAndArgumentsWithNonExistingFunctionReturnsFalse()
+ {
+ // Arrange
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin");
+ var plugins = new KernelPluginCollection([plugin]);
+
+ var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty);
+
+ // Act
+ var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments);
+
+ // Assert
+ Assert.False(result);
+ Assert.Null(actualFunction);
+ Assert.Null(actualArguments);
+ }
+
+ [Fact]
+ public void TryGetFunctionAndArgumentsWithoutArgumentsReturnsTrue()
+ {
+ // Arrange
+ var function = KernelFunctionFactory.CreateFromMethod(() => "Result", "MyFunction");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
+
+ var plugins = new KernelPluginCollection([plugin]);
+ var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty);
+
+ // Act
+ var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments);
+
+ // Assert
+ Assert.True(result);
+ Assert.Equal(function.Name, actualFunction?.Name);
+ Assert.Null(actualArguments);
+ }
+
+ [Fact]
+ public void TryGetFunctionAndArgumentsWithArgumentsReturnsTrue()
+ {
+ // Arrange
+ var function = KernelFunctionFactory.CreateFromMethod(() => "Result", "MyFunction");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
+
+ var plugins = new KernelPluginCollection([plugin]);
+ var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}");
+
+ // Act
+ var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments);
+
+ // Assert
+ Assert.True(result);
+ Assert.Equal(function.Name, actualFunction?.Name);
+
+ Assert.NotNull(actualArguments);
+
+ Assert.Equal("San Diego", actualArguments["location"]);
+ Assert.Equal("300", actualArguments["max_price"]);
+
+ Assert.Null(actualArguments["null_argument"]);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIStreamingTextContentTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIStreamingTextContentTests.cs
new file mode 100644
index 000000000000..a58df5676aca
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIStreamingTextContentTests.cs
@@ -0,0 +1,41 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Text;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+/// <summary>
+/// Unit tests for <see cref="AzureOpenAIStreamingTextContent"/> class.
+/// </summary>
+public sealed class AzureOpenAIStreamingTextContentTests
+{
+ [Fact]
+ public void ToByteArrayWorksCorrectly()
+ {
+ // Arrange
+ var expectedBytes = Encoding.UTF8.GetBytes("content");
+ var content = new AzureOpenAIStreamingTextContent("content", 0, "model-id");
+
+ // Act
+ var actualBytes = content.ToByteArray();
+
+ // Assert
+ Assert.Equal(expectedBytes, actualBytes);
+ }
+
+ [Theory]
+ [InlineData(null, "")]
+ [InlineData("content", "content")]
+ public void ToStringWorksCorrectly(string? content, string expectedString)
+ {
+ // Arrange
+ var textContent = new AzureOpenAIStreamingTextContent(content!, 0, "model-id");
+
+ // Act
+ var actualString = textContent.ToString();
+
+ // Assert
+ Assert.Equal(expectedString, actualString);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs
new file mode 100644
index 000000000000..9fb65039116d
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs
@@ -0,0 +1,77 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Net;
+using Azure;
+using Azure.Core;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+/// <summary>
+/// Unit tests for <see cref="RequestFailedExceptionExtensions"/> class.
+/// </summary>
+public sealed class RequestFailedExceptionExtensionsTests
+{
+ [Theory]
+ [InlineData(0, null)]
+ [InlineData(500, HttpStatusCode.InternalServerError)]
+ public void ToHttpOperationExceptionWithStatusReturnsValidException(int responseStatus, HttpStatusCode? httpStatusCode)
+ {
+ // Arrange
+ var exception = new RequestFailedException(responseStatus, "Error Message");
+
+ // Act
+ var actualException = exception.ToHttpOperationException();
+
+ // Assert
+ Assert.IsType<HttpOperationException>(actualException);
+ Assert.Equal(httpStatusCode, actualException.StatusCode);
+ Assert.Equal("Error Message", actualException.Message);
+ Assert.Same(exception, actualException.InnerException);
+ }
+
+ [Fact]
+ public void ToHttpOperationExceptionWithContentReturnsValidException()
+ {
+ // Arrange
+ using var response = new FakeResponse("Response Content", 500);
+ var exception = new RequestFailedException(response);
+
+ // Act
+ var actualException = exception.ToHttpOperationException();
+
+ // Assert
+ Assert.IsType<HttpOperationException>(actualException);
+ Assert.Equal(HttpStatusCode.InternalServerError, actualException.StatusCode);
+ Assert.Equal("Response Content", actualException.ResponseContent);
+ Assert.Same(exception, actualException.InnerException);
+ }
+
+ #region private
+
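+ // Minimal Azure.Core Response stub exposing only the members the extension method reads.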
+ private sealed class FakeResponse(string responseContent, int status) : Response
+ {
+ private readonly string _responseContent = responseContent;
+ private readonly IEnumerable<HttpHeader> _headers = [];
+
+ public override BinaryData Content => BinaryData.FromString(this._responseContent);
+ public override int Status { get; } = status;
+ public override string ReasonPhrase => "Reason Phrase";
+ public override Stream? ContentStream { get => null; set => throw new NotImplementedException(); }
+ public override string ClientRequestId { get => "Client Request Id"; set => throw new NotImplementedException(); }
+
+ public override void Dispose() { }
+ protected override bool ContainsHeader(string name) => throw new NotImplementedException();
+ protected override IEnumerable<HttpHeader> EnumerateHeaders() => this._headers;
+#pragma warning disable CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes).
+ protected override bool TryGetHeader(string name, out string? value) => throw new NotImplementedException();
+ protected override bool TryGetHeaderValues(string name, out IEnumerable<string>? values) => throw new NotImplementedException();
+#pragma warning restore CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes).
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs
new file mode 100644
index 000000000000..270b055d730c
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs
@@ -0,0 +1,629 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.FunctionCalling;
+
+public sealed class AutoFunctionInvocationFilterTests : IDisposable
+{
+ private readonly MultipleHttpMessageHandlerStub _messageHandlerStub;
+ private readonly HttpClient _httpClient;
+
+ public AutoFunctionInvocationFilterTests()
+ {
+ this._messageHandlerStub = new MultipleHttpMessageHandlerStub();
+
+ this._httpClient = new HttpClient(this._messageHandlerStub, false);
+ }
+
+ [Fact]
+ public async Task FiltersAreExecutedCorrectlyAsync()
+ {
+ // Arrange
+ int filterInvocations = 0;
+ int functionInvocations = 0;
+ int[] expectedRequestSequenceNumbers = [0, 0, 1, 1];
+ int[] expectedFunctionSequenceNumbers = [0, 1, 0, 1];
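+ // Two request rounds (indexes 0 and 1), each auto-invoking two functions (indexes 0 and 1).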
+ List<int> requestSequenceNumbers = [];
+ List<int> functionSequenceNumbers = [];
+ Kernel? contextKernel = null;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ contextKernel = context.Kernel;
+
+ if (context.ChatHistory.Last() is AzureOpenAIChatMessageContent content)
+ {
+ Assert.Equal(2, content.ToolCalls.Count);
+ }
+
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ filterInvocations++;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ }));
+
+ // Assert
+ Assert.Equal(4, filterInvocations);
+ Assert.Equal(4, functionInvocations);
+ Assert.Equal(expectedRequestSequenceNumbers, requestSequenceNumbers);
+ Assert.Equal(expectedFunctionSequenceNumbers, functionSequenceNumbers);
+ Assert.Same(kernel, contextKernel);
+ Assert.Equal("Test chat response", result.ToString());
+ }
+
+ [Fact]
+ public async Task FiltersAreExecutedCorrectlyOnStreamingAsync()
+ {
+ // Arrange
+ int filterInvocations = 0;
+ int functionInvocations = 0;
+ List<int> requestSequenceNumbers = [];
+ List<int> functionSequenceNumbers = [];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ if (context.ChatHistory.Last() is AzureOpenAIChatMessageContent content)
+ {
+ Assert.Equal(2, content.ToolCalls.Count);
+ }
+
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ filterInvocations++;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
+ { }
+
+ // Assert
+ Assert.Equal(4, filterInvocations);
+ Assert.Equal(4, functionInvocations);
+ Assert.Equal([0, 0, 1, 1], requestSequenceNumbers);
+ Assert.Equal([0, 1, 0, 1], functionSequenceNumbers);
+ }
+
+ [Fact]
+ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync()
+ {
+ // Arrange
+ var function = KernelFunctionFactory.CreateFromMethod(() => "Result");
+ var executionOrder = new List<string>();
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var filter1 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter1-Invoking");
+ await next(context);
+ });
+
+ var filter2 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter2-Invoking");
+ await next(context);
+ });
+
+ var builder = Kernel.CreateBuilder();
+
+ builder.Plugins.Add(plugin);
+
+ builder.Services.AddSingleton<IChatCompletionService>((serviceProvider) =>
+ {
+ return new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+
+ // Case #1 - Add filter to services
+ builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(filter1);
+
+ var kernel = builder.Build();
+
+ // Case #2 - Add filter to kernel
+ kernel.AutoFunctionInvocationFilters.Add(filter2);
+
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ }));
+
+ // Assert
+ Assert.Equal("Filter1-Invoking", executionOrder[0]);
+ Assert.Equal("Filter2-Invoking", executionOrder[1]);
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming)
+ {
+ // Arrange
+ var function = KernelFunctionFactory.CreateFromMethod(() => "Result");
+ var executionOrder = new List<string>();
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var filter1 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter1-Invoking");
+ await next(context);
+ executionOrder.Add("Filter1-Invoked");
+ });
+
+ var filter2 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter2-Invoking");
+ await next(context);
+ executionOrder.Add("Filter2-Invoked");
+ });
+
+ var filter3 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter3-Invoking");
+ await next(context);
+ executionOrder.Add("Filter3-Invoked");
+ });
+
+ var builder = Kernel.CreateBuilder();
+
+ builder.Plugins.Add(plugin);
+
+ builder.Services.AddSingleton<IChatCompletionService>((serviceProvider) =>
+ {
+ return new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
+ });
+
+ builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(filter1);
+ builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(filter2);
+ builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(filter3);
+
+ var kernel = builder.Build();
+
+ var arguments = new KernelArguments(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ });
+
+ // Act
+ if (isStreaming)
+ {
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", arguments))
+ { }
+ }
+ else
+ {
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ await kernel.InvokePromptAsync("Test prompt", arguments);
+ }
+
+ // Assert
+ Assert.Equal("Filter1-Invoking", executionOrder[0]);
+ Assert.Equal("Filter2-Invoking", executionOrder[1]);
+ Assert.Equal("Filter3-Invoking", executionOrder[2]);
+ Assert.Equal("Filter3-Invoked", executionOrder[3]);
+ Assert.Equal("Filter2-Invoked", executionOrder[4]);
+ Assert.Equal("Filter1-Invoked", executionOrder[5]);
+ }
+
+ [Fact]
+ public async Task FilterCanOverrideArgumentsAsync()
+ {
+ // Arrange
+ const string NewValue = "NewValue";
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ context.Arguments!["parameter"] = NewValue;
+ await next(context);
+ context.Terminate = true;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ }));
+
+ // Assert
+ Assert.Equal("NewValue", result.ToString());
+ }
+
+ [Fact]
+ public async Task FilterCanHandleExceptionAsync()
+ {
+ // Arrange
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ try
+ {
+ await next(context);
+ }
+ catch (KernelException exception)
+ {
+ Assert.Equal("Exception from Function1", exception.Message);
+ context.Result = new FunctionResult(context.Result, "Result from filter");
+ }
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ var chatCompletion = new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
+
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ var chatHistory = new ChatHistory();
+
+ // Act
+ var result = await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
+
+ var firstFunctionResult = chatHistory[^2].Content;
+ var secondFunctionResult = chatHistory[^1].Content;
+
+ // Assert
+ Assert.Equal("Result from filter", firstFunctionResult);
+ Assert.Equal("Result from Function2", secondFunctionResult);
+ }
+
+ [Fact]
+ public async Task FilterCanHandleExceptionOnStreamingAsync()
+ {
+ // Arrange
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ try
+ {
+ await next(context);
+ }
+ catch (KernelException)
+ {
+ context.Result = new FunctionResult(context.Result, "Result from filter");
+ }
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var chatCompletion = new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
+
+ var chatHistory = new ChatHistory();
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ await foreach (var item in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel))
+ { }
+
+ var firstFunctionResult = chatHistory[^2].Content;
+ var secondFunctionResult = chatHistory[^1].Content;
+
+ // Assert
+ Assert.Equal("Result from filter", firstFunctionResult);
+ Assert.Equal("Result from Function2", secondFunctionResult);
+ }
+
+ [Fact]
+ public async Task FiltersCanSkipFunctionExecutionAsync()
+ {
+ // Arrange
+ int filterInvocations = 0;
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ // The filter delegate invokes only the second function; the first one is skipped.
+ if (context.Function.Name == "Function2")
+ {
+ await next(context);
+ }
+
+ filterInvocations++;
+ });
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ }));
+
+ // Assert
+ Assert.Equal(2, filterInvocations);
+ Assert.Equal(0, firstFunctionInvocations);
+ Assert.Equal(1, secondFunctionInvocations);
+ }
+
+ [Fact]
+ public async Task PreFilterCanTerminateOperationAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ // Terminate before the first function, so no functions are invoked.
+ context.Terminate = true;
+
+ await next(context);
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ }));
+
+ // Assert
+ Assert.Equal(0, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ }
+
+ [Fact]
+ public async Task PreFilterCanTerminateOperationOnStreamingAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ // Terminate before the first function, so no functions are invoked.
+ context.Terminate = true;
+
+ await next(context);
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
+ { }
+
+ // Assert
+ Assert.Equal(0, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ }
+
+ [Fact]
+ public async Task PostFilterCanTerminateOperationAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+ List<int> requestSequenceNumbers = [];
+ List<int> functionSequenceNumbers = [];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ // Terminate after the first function, so the second function is not invoked.
+ context.Terminate = true;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ }));
+
+ // Assert
+ Assert.Equal(1, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ Assert.Equal([0], requestSequenceNumbers);
+ Assert.Equal([0], functionSequenceNumbers);
+
+ // Results of function invoked before termination should be returned
+ var lastMessageContent = result.GetValue<ChatMessageContent>();
+ Assert.NotNull(lastMessageContent);
+
+ Assert.Equal("function1-value", lastMessageContent.Content);
+ Assert.Equal(AuthorRole.Tool, lastMessageContent.Role);
+ }
+
+ [Fact]
+ public async Task PostFilterCanTerminateOperationOnStreamingAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+ List<int> requestSequenceNumbers = [];
+ List<int> functionSequenceNumbers = [];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ // Terminating after first function, so second function won't be invoked.
+ context.Terminate = true;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+
+ List<StreamingKernelContent> streamingContent = [];
+
+ // Act
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
+ {
+ streamingContent.Add(item);
+ }
+
+ // Assert
+ Assert.Equal(1, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ Assert.Equal([0], requestSequenceNumbers);
+ Assert.Equal([0], functionSequenceNumbers);
+
+ // Results of function invoked before termination should be returned
+ Assert.Equal(3, streamingContent.Count);
+
+ var lastMessageContent = streamingContent[^1] as StreamingChatMessageContent;
+ Assert.NotNull(lastMessageContent);
+
+ Assert.Equal("function1-value", lastMessageContent.Content);
+ Assert.Equal(AuthorRole.Tool, lastMessageContent.Role);
+ }
+
+ public void Dispose()
+ {
+ this._httpClient.Dispose();
+ this._messageHandlerStub.Dispose();
+ }
+
+ #region private
+
+#pragma warning disable CA2000 // Dispose objects before losing scope
+ private static List<HttpResponseMessage> GetFunctionCallingResponses()
+ {
+ return [
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }
+ ];
+ }
+
+ private static List<HttpResponseMessage> GetFunctionCallingStreamingResponses()
+ {
+ return [
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }
+ ];
+ }
+#pragma warning restore CA2000
+
+ private Kernel GetKernelWithFilter(
+ KernelPlugin plugin,
+ Func<AutoFunctionInvocationContext, Func<AutoFunctionInvocationContext, Task>, Task>? onAutoFunctionInvocation)
+ {
+ var builder = Kernel.CreateBuilder();
+ var filter = new AutoFunctionInvocationFilter(onAutoFunctionInvocation);
+
+ builder.Plugins.Add(plugin);
+ builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(filter);
+
+ builder.Services.AddSingleton<IChatCompletionService>((serviceProvider) =>
+ {
+ return new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
+ });
+
+ return builder.Build();
+ }
+
+ private sealed class AutoFunctionInvocationFilter(
+ Func<AutoFunctionInvocationContext, Func<AutoFunctionInvocationContext, Task>, Task>? onAutoFunctionInvocation) : IAutoFunctionInvocationFilter
+ {
+ private readonly Func<AutoFunctionInvocationContext, Func<AutoFunctionInvocationContext, Task>, Task>? _onAutoFunctionInvocation = onAutoFunctionInvocation;
+
+ public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next) =>
+ this._onAutoFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask;
+ }
+
+ #endregion
+}
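
The pattern these tests exercise is an IAutoFunctionInvocationFilter that sets context.Terminate to stop the auto-invocation loop. For orientation, a minimal, self-contained sketch of how an application might wire up such a filter; the filter class name here is hypothetical:

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;

var builder = Kernel.CreateBuilder();

// Register as the interface type so the kernel's invocation loop discovers it.
builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(new TerminatingFilter());

Kernel kernel = builder.Build();

internal sealed class TerminatingFilter : IAutoFunctionInvocationFilter
{
    public async Task OnAutoFunctionInvocationAsync(
        AutoFunctionInvocationContext context,
        Func<AutoFunctionInvocationContext, Task> next)
    {
        await next(context);      // let the requested function run first
        context.Terminate = true; // then stop any remaining auto-invocations
    }
}
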
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs
new file mode 100644
index 000000000000..bd268ef67991
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs
@@ -0,0 +1,188 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Linq;
+using System.Text.Json;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.FunctionCalling;
+
+public sealed class AzureOpenAIFunctionTests
+{
+ [Theory]
+ [InlineData(null, null, "", "")]
+ [InlineData("name", "description", "name", "description")]
+ public void ItInitializesOpenAIFunctionParameterCorrectly(string? name, string? description, string expectedName, string expectedDescription)
+ {
+ // Arrange & Act
+ var schema = KernelJsonSchema.Parse("{\"type\": \"object\" }");
+ var functionParameter = new AzureOpenAIFunctionParameter(name, description, true, typeof(string), schema);
+
+ // Assert
+ Assert.Equal(expectedName, functionParameter.Name);
+ Assert.Equal(expectedDescription, functionParameter.Description);
+ Assert.True(functionParameter.IsRequired);
+ Assert.Equal(typeof(string), functionParameter.ParameterType);
+ Assert.Same(schema, functionParameter.Schema);
+ }
+
+ [Theory]
+ [InlineData(null, "")]
+ [InlineData("description", "description")]
+ public void ItInitializesOpenAIFunctionReturnParameterCorrectly(string? description, string expectedDescription)
+ {
+ // Arrange & Act
+ var schema = KernelJsonSchema.Parse("{\"type\": \"object\" }");
+ var functionParameter = new AzureOpenAIFunctionReturnParameter(description, typeof(string), schema);
+
+ // Assert
+ Assert.Equal(expectedDescription, functionParameter.Description);
+ Assert.Equal(typeof(string), functionParameter.ParameterType);
+ Assert.Same(schema, functionParameter.Schema);
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionWithNoPluginName()
+ {
+ // Arrange
+ AzureOpenAIFunction sut = KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.").Metadata.ToAzureOpenAIFunction();
+
+ // Act
+ FunctionDefinition result = sut.ToFunctionDefinition();
+
+ // Assert
+ Assert.Equal(sut.FunctionName, result.Name);
+ Assert.Equal(sut.Description, result.Description);
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionWithNullParameters()
+ {
+ // Arrange
+ AzureOpenAIFunction sut = new("plugin", "function", "description", null, null);
+
+ // Act
+ var result = sut.ToFunctionDefinition();
+
+ // Assert
+ Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.Parameters.ToString());
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionWithPluginName()
+ {
+ // Arrange
+ AzureOpenAIFunction sut = KernelPluginFactory.CreateFromFunctions("myplugin", new[]
+ {
+ KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.")
+ }).GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
+
+ // Act
+ FunctionDefinition result = sut.ToFunctionDefinition();
+
+ // Assert
+ Assert.Equal("myplugin-myfunc", result.Name);
+ Assert.Equal(sut.Description, result.Description);
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndReturnParameterType()
+ {
+ string expectedParameterSchema = """{ "type": "object", "required": ["param1", "param2"], "properties": { "param1": { "type": "string", "description": "String param 1" }, "param2": { "type": "integer", "description": "Int param 2" } } } """;
+
+ KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("Tests", new[]
+ {
+ KernelFunctionFactory.CreateFromMethod(
+ [return: Description("My test Result")] ([Description("String param 1")] string param1, [Description("Int param 2")] int param2) => "",
+ "TestFunction",
+ "My test function")
+ });
+
+ AzureOpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
+
+ FunctionDefinition functionDefinition = sut.ToFunctionDefinition();
+
+ var exp = JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema));
+ var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters));
+
+ Assert.NotNull(functionDefinition);
+ Assert.Equal("Tests-TestFunction", functionDefinition.Name);
+ Assert.Equal("My test function", functionDefinition.Description);
+ Assert.Equal(exp, act);
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndNoReturnParameterType()
+ {
+ string expectedParameterSchema = """{ "type": "object", "required": ["param1", "param2"], "properties": { "param1": { "type": "string", "description": "String param 1" }, "param2": { "type": "integer", "description": "Int param 2" } } } """;
+
+ KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("Tests", new[]
+ {
+ KernelFunctionFactory.CreateFromMethod(
+ [return: Description("My test Result")] ([Description("String param 1")] string param1, [Description("Int param 2")] int param2) => { },
+ "TestFunction",
+ "My test function")
+ });
+
+ AzureOpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
+
+ FunctionDefinition functionDefinition = sut.ToFunctionDefinition();
+
+ Assert.NotNull(functionDefinition);
+ Assert.Equal("Tests-TestFunction", functionDefinition.Name);
+ Assert.Equal("My test function", functionDefinition.Description);
+ Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)));
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypes()
+ {
+ // Arrange
+ AzureOpenAIFunction f = KernelFunctionFactory.CreateFromMethod(
+ () => { },
+ parameters: [new KernelParameterMetadata("param1")]).Metadata.ToAzureOpenAIFunction();
+
+ // Act
+ FunctionDefinition result = f.ToFunctionDefinition();
+ ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.Parameters.ToString())!;
+
+ // Assert
+ Assert.NotNull(pd.properties);
+ Assert.Single(pd.properties);
+ Assert.Equal(
+ JsonSerializer.Serialize(KernelJsonSchema.Parse("""{ "type":"string" }""")),
+ JsonSerializer.Serialize(pd.properties.First().Value.RootElement));
+ }
+
+ [Fact]
+ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypesButWithDescriptions()
+ {
+ // Arrange
+ AzureOpenAIFunction f = KernelFunctionFactory.CreateFromMethod(
+ () => { },
+ parameters: [new KernelParameterMetadata("param1") { Description = "something neat" }]).Metadata.ToAzureOpenAIFunction();
+
+ // Act
+ FunctionDefinition result = f.ToFunctionDefinition();
+ ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.Parameters.ToString())!;
+
+ // Assert
+ Assert.NotNull(pd.properties);
+ Assert.Single(pd.properties);
+ Assert.Equal(
+ JsonSerializer.Serialize(KernelJsonSchema.Parse("""{ "type":"string", "description":"something neat" }""")),
+ JsonSerializer.Serialize(pd.properties.First().Value.RootElement));
+ }
+
+#pragma warning disable CA1812 // uninstantiated internal class
+ private sealed class ParametersData
+ {
+ public string? type { get; set; }
+ public string[]? required { get; set; }
+ public Dictionary<string, KernelJsonSchema>? properties { get; set; }
+ }
+#pragma warning restore CA1812
+}
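
The conversion under test runs KernelFunctionMetadata through ToAzureOpenAIFunction() and then ToFunctionDefinition() to produce the Azure SDK's FunctionDefinition. A hedged sketch of that round trip as exercised from these unit tests (the helpers are internal to the connector, and the function name and body are illustrative only):

using System;
using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;

KernelFunction function = KernelFunctionFactory.CreateFromMethod(
    (string city) => $"Sunny in {city}",
    "GetWeather",
    "Gets the weather for a city.");

// KernelFunctionMetadata -> AzureOpenAIFunction -> Azure SDK FunctionDefinition.
AzureOpenAIFunction aoaiFunction = function.Metadata.ToAzureOpenAIFunction();
FunctionDefinition definition = aoaiFunction.ToFunctionDefinition();

Console.WriteLine(definition.Name);       // "GetWeather" (no plugin prefix here)
Console.WriteLine(definition.Parameters); // JSON schema for the city parameter
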
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs
new file mode 100644
index 000000000000..ebf7b67a2f9b
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs
@@ -0,0 +1,256 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.ComponentModel;
+using System.Linq;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Xunit;
+
+#pragma warning disable CA1812 // Uninstantiated internal types
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.FunctionCalling;
+
+public sealed class KernelFunctionMetadataExtensionsTests
+{
+ [Fact]
+ public void ItCanConvertToAzureOpenAIFunctionNoParameters()
+ {
+ // Arrange
+ var sut = new KernelFunctionMetadata("foo")
+ {
+ PluginName = "bar",
+ Description = "baz",
+ ReturnParameter = new KernelReturnParameterMetadata
+ {
+ Description = "retDesc",
+ Schema = KernelJsonSchema.Parse("""{"type": "object" }"""),
+ }
+ };
+
+ // Act
+ var result = sut.ToAzureOpenAIFunction();
+
+ // Assert
+ Assert.Equal(sut.Name, result.FunctionName);
+ Assert.Equal(sut.PluginName, result.PluginName);
+ Assert.Equal(sut.Description, result.Description);
+ Assert.Equal($"{sut.PluginName}-{sut.Name}", result.FullyQualifiedName);
+
+ Assert.NotNull(result.ReturnParameter);
+ Assert.Equal("retDesc", result.ReturnParameter.Description);
+ Assert.Equivalent(KernelJsonSchema.Parse("""{"type": "object" }"""), result.ReturnParameter.Schema);
+ Assert.Null(result.ReturnParameter.ParameterType);
+ }
+
+ [Fact]
+ public void ItCanConvertToAzureOpenAIFunctionNoPluginName()
+ {
+ // Arrange
+ var sut = new KernelFunctionMetadata("foo")
+ {
+ PluginName = string.Empty,
+ Description = "baz",
+ ReturnParameter = new KernelReturnParameterMetadata
+ {
+ Description = "retDesc",
+ Schema = KernelJsonSchema.Parse("""{"type": "object" }"""),
+ }
+ };
+
+ // Act
+ var result = sut.ToAzureOpenAIFunction();
+
+ // Assert
+ Assert.Equal(sut.Name, result.FunctionName);
+ Assert.Equal(sut.PluginName, result.PluginName);
+ Assert.Equal(sut.Description, result.Description);
+ Assert.Equal(sut.Name, result.FullyQualifiedName);
+
+ Assert.NotNull(result.ReturnParameter);
+ Assert.Equal("retDesc", result.ReturnParameter.Description);
+ Assert.Equivalent(KernelJsonSchema.Parse("""{"type": "object" }"""), result.ReturnParameter.Schema);
+ Assert.Null(result.ReturnParameter.ParameterType);
+ }
+
+ [Theory]
+ [InlineData(false)]
+ [InlineData(true)]
+ public void ItCanConvertToAzureOpenAIFunctionWithParameter(bool withSchema)
+ {
+ // Arrange
+ var param1 = new KernelParameterMetadata("param1")
+ {
+ Description = "This is param1",
+ DefaultValue = "1",
+ ParameterType = typeof(int),
+ IsRequired = false,
+ Schema = withSchema ? KernelJsonSchema.Parse("""{"type":"integer"}""") : null,
+ };
+
+ var sut = new KernelFunctionMetadata("foo")
+ {
+ PluginName = "bar",
+ Description = "baz",
+ Parameters = [param1],
+ ReturnParameter = new KernelReturnParameterMetadata
+ {
+ Description = "retDesc",
+ Schema = KernelJsonSchema.Parse("""{"type": "object" }"""),
+ }
+ };
+
+ // Act
+ var result = sut.ToAzureOpenAIFunction();
+ var outputParam = result.Parameters![0];
+
+ // Assert
+ Assert.Equal(param1.Name, outputParam.Name);
+ Assert.Equal("This is param1 (default value: 1)", outputParam.Description);
+ Assert.Equal(param1.IsRequired, outputParam.IsRequired);
+ Assert.NotNull(outputParam.Schema);
+ Assert.Equal("integer", outputParam.Schema.RootElement.GetProperty("type").GetString());
+
+ Assert.NotNull(result.ReturnParameter);
+ Assert.Equal("retDesc", result.ReturnParameter.Description);
+ Assert.Equivalent(KernelJsonSchema.Parse("""{"type": "object" }"""), result.ReturnParameter.Schema);
+ Assert.Null(result.ReturnParameter.ParameterType);
+ }
+
+ [Fact]
+ public void ItCanConvertToAzureOpenAIFunctionWithParameterNoType()
+ {
+ // Arrange
+ var param1 = new KernelParameterMetadata("param1") { Description = "This is param1" };
+
+ var sut = new KernelFunctionMetadata("foo")
+ {
+ PluginName = "bar",
+ Description = "baz",
+ Parameters = [param1],
+ ReturnParameter = new KernelReturnParameterMetadata
+ {
+ Description = "retDesc",
+ Schema = KernelJsonSchema.Parse("""{"type": "object" }"""),
+ }
+ };
+
+ // Act
+ var result = sut.ToAzureOpenAIFunction();
+ var outputParam = result.Parameters![0];
+
+ // Assert
+ Assert.Equal(param1.Name, outputParam.Name);
+ Assert.Equal(param1.Description, outputParam.Description);
+ Assert.Equal(param1.IsRequired, outputParam.IsRequired);
+
+ Assert.NotNull(result.ReturnParameter);
+ Assert.Equal("retDesc", result.ReturnParameter.Description);
+ Assert.Equivalent(KernelJsonSchema.Parse("""{"type": "object" }"""), result.ReturnParameter.Schema);
+ Assert.Null(result.ReturnParameter.ParameterType);
+ }
+
+ [Fact]
+ public void ItCanConvertToAzureOpenAIFunctionWithNoReturnParameterType()
+ {
+ // Arrange
+ var param1 = new KernelParameterMetadata("param1")
+ {
+ Description = "This is param1",
+ ParameterType = typeof(int),
+ };
+
+ var sut = new KernelFunctionMetadata("foo")
+ {
+ PluginName = "bar",
+ Description = "baz",
+ Parameters = [param1],
+ };
+
+ // Act
+ var result = sut.ToAzureOpenAIFunction();
+ var outputParam = result.Parameters![0];
+
+ // Assert
+ Assert.Equal(param1.Name, outputParam.Name);
+ Assert.Equal(param1.Description, outputParam.Description);
+ Assert.Equal(param1.IsRequired, outputParam.IsRequired);
+ Assert.NotNull(outputParam.Schema);
+ Assert.Equal("integer", outputParam.Schema.RootElement.GetProperty("type").GetString());
+ }
+
+ [Fact]
+ public void ItCanCreateValidAzureOpenAIFunctionManualForPlugin()
+ {
+ // Arrange
+ var kernel = new Kernel();
+ kernel.Plugins.AddFromType<MyPlugin>("MyPlugin");
+
+ var functionMetadata = kernel.Plugins["MyPlugin"].First().Metadata;
+
+ var sut = functionMetadata.ToAzureOpenAIFunction();
+
+ // Act
+ var result = sut.ToFunctionDefinition();
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Equal(
+ """{"type":"object","required":["parameter1","parameter2","parameter3"],"properties":{"parameter1":{"type":"string","description":"String parameter"},"parameter2":{"type":"string","enum":["Value1","Value2"],"description":"Enum parameter"},"parameter3":{"type":"string","format":"date-time","description":"DateTime parameter"}}}""",
+ result.Parameters.ToString()
+ );
+ }
+
+ [Fact]
+ public void ItCanCreateValidAzureOpenAIFunctionManualForPrompt()
+ {
+ // Arrange
+ var promptTemplateConfig = new PromptTemplateConfig("Hello AI")
+ {
+ Description = "My sample function."
+ };
+ promptTemplateConfig.InputVariables.Add(new InputVariable
+ {
+ Name = "parameter1",
+ Description = "String parameter",
+ JsonSchema = """{"type":"string","description":"String parameter"}"""
+ });
+ promptTemplateConfig.InputVariables.Add(new InputVariable
+ {
+ Name = "parameter2",
+ Description = "Enum parameter",
+ JsonSchema = """{"enum":["Value1","Value2"],"description":"Enum parameter"}"""
+ });
+ var function = KernelFunctionFactory.CreateFromPrompt(promptTemplateConfig);
+ var functionMetadata = function.Metadata;
+ var sut = functionMetadata.ToAzureOpenAIFunction();
+
+ // Act
+ var result = sut.ToFunctionDefinition();
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Equal(
+ """{"type":"object","required":["parameter1","parameter2"],"properties":{"parameter1":{"type":"string","description":"String parameter"},"parameter2":{"enum":["Value1","Value2"],"description":"Enum parameter"}}}""",
+ result.Parameters.ToString()
+ );
+ }
+
+ private enum MyEnum
+ {
+ Value1,
+ Value2
+ }
+
+ private sealed class MyPlugin
+ {
+ [KernelFunction, Description("My sample function.")]
+ public string MyFunction(
+ [Description("String parameter")] string parameter1,
+ [Description("Enum parameter")] MyEnum parameter2,
+ [Description("DateTime parameter")] DateTime parameter3
+ )
+ {
+ return "return";
+ }
+ }
+}
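
As the prompt-function tests above show, InputVariable.JsonSchema feeds the same metadata conversion. A short sketch under the same assumptions (the conversion helpers are internal to the connector; template text and variable names are illustrative):

using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;

var config = new PromptTemplateConfig("Summarize: {{$input}}")
{
    Description = "Summarizes text."
};
config.InputVariables.Add(new InputVariable
{
    Name = "input",
    Description = "Text to summarize",
    JsonSchema = """{"type":"string","description":"Text to summarize"}"""
});

KernelFunction function = KernelFunctionFactory.CreateFromPrompt(config);
var definition = function.Metadata.ToAzureOpenAIFunction().ToFunctionDefinition();

// The parameters schema is assembled from the input variables' JsonSchema values.
Console.WriteLine(definition.Parameters);
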
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/MultipleHttpMessageHandlerStub.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/MultipleHttpMessageHandlerStub.cs
new file mode 100644
index 000000000000..0af66de6a519
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/MultipleHttpMessageHandlerStub.cs
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace SemanticKernel.Connectors.AzureOpenAI;
+
+internal sealed class MultipleHttpMessageHandlerStub : DelegatingHandler
+{
+ private int _callIteration = 0;
+
+ public List<HttpRequestHeaders> RequestHeaders { get; private set; }
+
+ public List<HttpContentHeaders?> ContentHeaders { get; private set; }
+
+ public List<byte[]?> RequestContents { get; private set; }
+
+ public List<Uri?> RequestUris { get; private set; }
+
+ public List<HttpMethod> Methods { get; private set; }
+
+ public List<HttpResponseMessage> ResponsesToReturn { get; set; }
+
+ public MultipleHttpMessageHandlerStub()
+ {
+ this.RequestHeaders = [];
+ this.ContentHeaders = [];
+ this.RequestContents = [];
+ this.RequestUris = [];
+ this.Methods = [];
+ this.ResponsesToReturn = [];
+ }
+
+ protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+ {
+ this._callIteration++;
+
+ this.Methods.Add(request.Method);
+ this.RequestUris.Add(request.RequestUri);
+ this.RequestHeaders.Add(request.Headers);
+ this.ContentHeaders.Add(request.Content?.Headers);
+
+ var content = request.Content is null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken);
+
+ this.RequestContents.Add(content);
+
+ return await Task.FromResult(this.ResponsesToReturn[this._callIteration - 1]);
+ }
+}
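
The stub dequeues one canned HttpResponseMessage per request, in order, while recording everything it saw. Typical test usage looks roughly like this; the URL and payload are placeholders:

using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

using var handler = new MultipleHttpMessageHandlerStub();
handler.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
    Content = new StringContent("""{"id":"response-id"}""")
});

using var httpClient = new HttpClient(handler);

// The first request receives the first queued response; later requests get later ones.
HttpResponseMessage response = await httpClient.GetAsync(new Uri("https://endpoint/test"));
Console.WriteLine(handler.Methods[0]);     // GET
Console.WriteLine(handler.RequestUris[0]); // https://endpoint/test
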
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json
new file mode 100644
index 000000000000..737b972309ba
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json
@@ -0,0 +1,64 @@
+{
+ "id": "response-id",
+ "object": "chat.completion",
+ "created": 1699896916,
+ "model": "gpt-3.5-turbo-0613",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": null,
+ "tool_calls": [
+ {
+ "id": "1",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-GetCurrentWeather",
+ "arguments": "{\n\"location\": \"Boston, MA\"\n}"
+ }
+ },
+ {
+ "id": "2",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-FunctionWithException",
+ "arguments": "{\n\"argument\": \"value\"\n}"
+ }
+ },
+ {
+ "id": "3",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-NonExistentFunction",
+ "arguments": "{\n\"argument\": \"value\"\n}"
+ }
+ },
+ {
+ "id": "4",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-InvalidArguments",
+ "arguments": "invalid_arguments_format"
+ }
+ },
+ {
+ "id": "5",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-IntArguments",
+ "arguments": "{\n\"age\": 36\n}"
+ }
+ }
+ ]
+ },
+ "logprobs": null,
+ "finish_reason": "tool_calls"
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 82,
+ "completion_tokens": 17,
+ "total_tokens": 99
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json
new file mode 100644
index 000000000000..6c93e434f259
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json
@@ -0,0 +1,32 @@
+{
+ "id": "response-id",
+ "object": "chat.completion",
+ "created": 1699896916,
+ "model": "gpt-3.5-turbo-0613",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": null,
+ "tool_calls": [
+ {
+ "id": "1",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-GetCurrentWeather",
+ "arguments": "{\n\"location\": \"Boston, MA\"\n}"
+ }
+ }
+ ]
+ },
+ "logprobs": null,
+ "finish_reason": "tool_calls"
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 82,
+ "completion_tokens": 17,
+ "total_tokens": 99
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt
new file mode 100644
index 000000000000..ceb8f3e8b44b
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt
@@ -0,0 +1,9 @@
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-FunctionWithException","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":2,"id":"3","type":"function","function":{"name":"MyPlugin-NonExistentFunction","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":3,"id":"4","type":"function","function":{"name":"MyPlugin-InvalidArguments","arguments":"invalid_arguments_format"}}]},"finish_reason":"tool_calls"}]}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt
new file mode 100644
index 000000000000..6835039941ce
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt
@@ -0,0 +1,3 @@
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt
new file mode 100644
index 000000000000..e5e8d1b19afd
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt
@@ -0,0 +1,5 @@
+data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{"content":"Test chat streaming response"},"logprobs":null,"finish_reason":null}]}
+
+data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json
new file mode 100644
index 000000000000..b601bac8b55b
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json
@@ -0,0 +1,22 @@
+{
+ "id": "response-id",
+ "object": "chat.completion",
+ "created": 1704208954,
+ "model": "gpt-4",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": "Test chat response"
+ },
+ "finish_reason": "stop"
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 55,
+ "completion_tokens": 100,
+ "total_tokens": 155
+ },
+ "system_fingerprint": null
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt
new file mode 100644
index 000000000000..5e17403da9fc
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt
@@ -0,0 +1 @@
+data: {"id":"response-id","model":"","created":1684304924,"object":"chat.completion","choices":[{"index":0,"messages":[{"delta":{"role":"assistant","content":"Test chat with data streaming response"},"end_turn":false}]}]}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json
new file mode 100644
index 000000000000..40d769dac8a7
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json
@@ -0,0 +1,28 @@
+{
+ "id": "response-id",
+ "model": "",
+ "created": 1684304924,
+ "object": "chat.completion",
+ "choices": [
+ {
+ "index": 0,
+ "messages": [
+ {
+ "role": "tool",
+ "content": "{\"citations\": [{\"content\": \"\\nAzure AI services are cloud-based artificial intelligence (AI) services...\", \"id\": null, \"title\": \"What is Azure AI services\", \"filepath\": null, \"url\": null, \"metadata\": {\"chunking\": \"original document size=250. Scores=0.4314117431640625 and 1.72564697265625.Org Highlight count=4.\"}, \"chunk_id\": \"0\"}], \"intent\": \"[\\\"Learn about Azure AI services.\\\"]\"}",
+ "end_turn": false
+ },
+ {
+ "role": "assistant",
+ "content": "Test chat with data response",
+ "end_turn": true
+ }
+ ]
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 55,
+ "completion_tokens": 100,
+ "total_tokens": 155
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json
new file mode 100644
index 000000000000..3ffa6b00cc3f
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json
@@ -0,0 +1,40 @@
+{
+ "id": "response-id",
+ "object": "chat.completion",
+ "created": 1699896916,
+ "model": "gpt-3.5-turbo-0613",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": null,
+ "tool_calls": [
+ {
+ "id": "1",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-Function1",
+ "arguments": "{\n\"parameter\": \"function1-value\"\n}"
+ }
+ },
+ {
+ "id": "2",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-Function2",
+ "arguments": "{\n\"parameter\": \"function2-value\"\n}"
+ }
+ }
+ ]
+ },
+ "logprobs": null,
+ "finish_reason": "tool_calls"
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 82,
+ "completion_tokens": 17,
+ "total_tokens": 99
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt
new file mode 100644
index 000000000000..c8aeb98e8b82
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt
@@ -0,0 +1,5 @@
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt
new file mode 100644
index 000000000000..a511ea446236
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt
@@ -0,0 +1,3 @@
+data: {"id":"response-id","object":"text_completion","created":1646932609,"model":"ada","choices":[{"text":"Test chat streaming response","index":0,"logprobs":null,"finish_reason":"length"}],"usage":{"prompt_tokens":55,"completion_tokens":100,"total_tokens":155}}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json
new file mode 100644
index 000000000000..540229437440
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json
@@ -0,0 +1,19 @@
+{
+ "id": "response-id",
+ "object": "text_completion",
+ "created": 1646932609,
+ "model": "ada",
+ "choices": [
+ {
+ "text": "Test chat response",
+ "index": 0,
+ "logprobs": null,
+ "finish_reason": "length"
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 55,
+ "completion_tokens": 100,
+ "total_tokens": 155
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs
new file mode 100644
index 000000000000..8303b2ceaeaf
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs
@@ -0,0 +1,20 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+/// <summary>
+/// Helper class to inject headers into Azure SDK HTTP pipeline
+/// </summary>
+internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy
+{
+ private readonly string _headerName = headerName;
+ private readonly string _headerValue = headerValue;
+
+ public override void OnSendingRequest(HttpMessage message)
+ {
+ message.Request.Headers.Add(this._headerName, this._headerValue);
+ }
+}
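
The policy hooks into the Azure SDK pipeline via ClientOptions.AddPolicy. Since AddHeaderRequestPolicy is internal to the connector, the snippet below is only a sketch of the mechanism it relies on, with placeholder endpoint, key, and header values:

using System;
using Azure;
using Azure.AI.OpenAI;
using Azure.Core;

var options = new OpenAIClientOptions();

// Stamp a custom header onto every request that goes through the pipeline.
options.AddPolicy(
    new AddHeaderRequestPolicy("Semantic-Kernel-Version", "1.0.0"),
    HttpPipelinePosition.PerCall);

var client = new OpenAIClient(
    new Uri("https://my-resource.openai.azure.com"),
    new AzureKeyCredential("test-api-key"),
    options);
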
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs
new file mode 100644
index 000000000000..69c305f58f34
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs
@@ -0,0 +1,432 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Text;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+/// <summary>
+/// Execution settings for an AzureOpenAI completion request.
+/// </summary>
+[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)]
+public sealed class AzureOpenAIPromptExecutionSettings : PromptExecutionSettings
+{
+ /// <summary>
+ /// Temperature controls the randomness of the completion.
+ /// The higher the temperature, the more random the completion.
+ /// Default is 1.0.
+ /// </summary>
+ [JsonPropertyName("temperature")]
+ public double Temperature
+ {
+ get => this._temperature;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._temperature = value;
+ }
+ }
+
+ /// <summary>
+ /// TopP controls the diversity of the completion.
+ /// The higher the TopP, the more diverse the completion.
+ /// Default is 1.0.
+ /// </summary>
+ [JsonPropertyName("top_p")]
+ public double TopP
+ {
+ get => this._topP;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._topP = value;
+ }
+ }
+
+ /// <summary>
+ /// Number between -2.0 and 2.0. Positive values penalize new tokens
+ /// based on whether they appear in the text so far, increasing the
+ /// model's likelihood to talk about new topics.
+ /// </summary>
+ [JsonPropertyName("presence_penalty")]
+ public double PresencePenalty
+ {
+ get => this._presencePenalty;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._presencePenalty = value;
+ }
+ }
+
+ /// <summary>
+ /// Number between -2.0 and 2.0. Positive values penalize new tokens
+ /// based on their existing frequency in the text so far, decreasing
+ /// the model's likelihood to repeat the same line verbatim.
+ /// </summary>
+ [JsonPropertyName("frequency_penalty")]
+ public double FrequencyPenalty
+ {
+ get => this._frequencyPenalty;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._frequencyPenalty = value;
+ }
+ }
+
+ /// <summary>
+ /// The maximum number of tokens to generate in the completion.
+ /// </summary>
+ [JsonPropertyName("max_tokens")]
+ public int? MaxTokens
+ {
+ get => this._maxTokens;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._maxTokens = value;
+ }
+ }
+
+ /// <summary>
+ /// Sequences where the completion will stop generating further tokens.
+ /// </summary>
+ [JsonPropertyName("stop_sequences")]
+ public IList<string>? StopSequences
+ {
+ get => this._stopSequences;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._stopSequences = value;
+ }
+ }
+
+ /// <summary>
+ /// How many completions to generate for each prompt. Default is 1.
+ /// Note: Because this parameter generates many completions, it can quickly consume your token quota.
+ /// Use carefully and ensure that you have reasonable settings for max_tokens and stop.
+ /// </summary>
+ [JsonPropertyName("results_per_prompt")]
+ public int ResultsPerPrompt
+ {
+ get => this._resultsPerPrompt;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._resultsPerPrompt = value;
+ }
+ }
+
+ /// <summary>
+ /// If specified, the system will make a best effort to sample deterministically such that repeated requests with the
+ /// same seed and parameters should return the same result. Determinism is not guaranteed.
+ /// </summary>
+ [JsonPropertyName("seed")]
+ public long? Seed
+ {
+ get => this._seed;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._seed = value;
+ }
+ }
+
+ /// <summary>
+ /// Gets or sets the response format to use for the completion.
+ /// </summary>
+ /// <remarks>
+ /// Possible values are: "json_object", "text", <see cref="ChatCompletionsResponseFormat"/> object.
+ /// </remarks>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("response_format")]
+ public object? ResponseFormat
+ {
+ get => this._responseFormat;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._responseFormat = value;
+ }
+ }
+
+ /// <summary>
+ /// The system prompt to use when generating text using a chat model.
+ /// Defaults to "Assistant is a large language model."
+ /// </summary>
+ [JsonPropertyName("chat_system_prompt")]
+ public string? ChatSystemPrompt
+ {
+ get => this._chatSystemPrompt;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._chatSystemPrompt = value;
+ }
+ }
+
+ /// <summary>
+ /// Modify the likelihood of specified tokens appearing in the completion.
+ /// </summary>
+ [JsonPropertyName("token_selection_biases")]
+ public IDictionary<int, int>? TokenSelectionBiases
+ {
+ get => this._tokenSelectionBiases;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._tokenSelectionBiases = value;
+ }
+ }
+
+ /// <summary>
+ /// Gets or sets the behavior for how tool calls are handled.
+ /// </summary>
+ /// <remarks>
+ /// <list type="bullet">
+ /// <item>To disable all tool calling, set the property to null (the default).</item>
+ /// <item>
+ /// To request that the model use a specific function, set the property to an instance returned
+ /// from <see cref="AzureToolCallBehavior.RequireFunction"/>.
+ /// </item>
+ /// <item>
+ /// To allow the model to request one of any number of functions, set the property to an
+ /// instance returned from <see cref="AzureToolCallBehavior.EnableFunctions"/>, called with
+ /// a list of the functions available.
+ /// </item>
+ /// <item>
+ /// To allow the model to request one of any of the functions in the supplied <see cref="Kernel"/>,
+ /// set the property to <see cref="AzureToolCallBehavior.EnableKernelFunctions"/> if the client should simply
+ /// send the information about the functions and not handle the response in any special manner, or
+ /// <see cref="AzureToolCallBehavior.AutoInvokeKernelFunctions"/> if the client should attempt to automatically
+ /// invoke the function and send the result back to the service.
+ /// </item>
+ /// </list>
+ /// For all options where an instance is provided, auto-invoke behavior may be selected. If the service
+ /// sends a request for a function call, if auto-invoke has been requested, the client will attempt to
+ /// resolve that function from the functions available in the <see cref="Kernel"/>, and if found, rather
+ /// than returning the response back to the caller, it will handle the request automatically, invoking
+ /// the function, and sending back the result. The intermediate messages will be retained in the
+ /// <see cref="ChatHistory"/> if an instance was provided.
+ /// </remarks>
+ public AzureToolCallBehavior? ToolCallBehavior
+ {
+ get => this._toolCallBehavior;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._toolCallBehavior = value;
+ }
+ }
+
+ /// <summary>
+ /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse
+ /// </summary>
+ public string? User
+ {
+ get => this._user;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._user = value;
+ }
+ }
+
+ /// <summary>
+ /// Whether to return log probabilities of the output tokens or not.
+ /// If true, returns the log probabilities of each output token returned in the `content` of `message`.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("logprobs")]
+ public bool? Logprobs
+ {
+ get => this._logprobs;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._logprobs = value;
+ }
+ }
+
+ /// <summary>
+ /// An integer specifying the number of most likely tokens to return at each token position, each with an associated log probability.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("top_logprobs")]
+ public int? TopLogprobs
+ {
+ get => this._topLogprobs;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._topLogprobs = value;
+ }
+ }
+
+ /// <summary>
+ /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions.
+ /// This property is compatible only with Azure OpenAI.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonIgnore]
+ public AzureChatExtensionsOptions? AzureChatExtensionsOptions
+ {
+ get => this._azureChatExtensionsOptions;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._azureChatExtensionsOptions = value;
+ }
+ }
+
+ /// <inheritdoc/>
+ public override void Freeze()
+ {
+ if (this.IsFrozen)
+ {
+ return;
+ }
+
+ base.Freeze();
+
+ if (this._stopSequences is not null)
+ {
+ this._stopSequences = new ReadOnlyCollection<string>(this._stopSequences);
+ }
+
+ if (this._tokenSelectionBiases is not null)
+ {
+ this._tokenSelectionBiases = new ReadOnlyDictionary<int, int>(this._tokenSelectionBiases);
+ }
+ }
+
+ /// <inheritdoc/>
+ public override PromptExecutionSettings Clone()
+ {
+ return new AzureOpenAIPromptExecutionSettings()
+ {
+ ModelId = this.ModelId,
+ ExtensionData = this.ExtensionData is not null ? new Dictionary<string, object>(this.ExtensionData) : null,
+ Temperature = this.Temperature,
+ TopP = this.TopP,
+ PresencePenalty = this.PresencePenalty,
+ FrequencyPenalty = this.FrequencyPenalty,
+ MaxTokens = this.MaxTokens,
+ StopSequences = this.StopSequences is not null ? new List<string>(this.StopSequences) : null,
+ ResultsPerPrompt = this.ResultsPerPrompt,
+ Seed = this.Seed,
+ ResponseFormat = this.ResponseFormat,
+ TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary<int, int>(this.TokenSelectionBiases) : null,
+ ToolCallBehavior = this.ToolCallBehavior,
+ User = this.User,
+ ChatSystemPrompt = this.ChatSystemPrompt,
+ Logprobs = this.Logprobs,
+ TopLogprobs = this.TopLogprobs,
+ AzureChatExtensionsOptions = this.AzureChatExtensionsOptions,
+ };
+ }
+
+ /// <summary>
+ /// Default max tokens for a text generation
+ /// </summary>
+ internal static int DefaultTextMaxTokens { get; } = 256;
+
+ /// <summary>
+ /// Create a new settings object with the values from another settings object.
+ /// </summary>
+ /// <param name="executionSettings">Template configuration</param>
+ /// <param name="defaultMaxTokens">Default max tokens</param>
+ /// <returns>An instance of <see cref="AzureOpenAIPromptExecutionSettings"/></returns>
+ public static AzureOpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null)
+ {
+ if (executionSettings is null)
+ {
+ return new AzureOpenAIPromptExecutionSettings()
+ {
+ MaxTokens = defaultMaxTokens
+ };
+ }
+
+ if (executionSettings is AzureOpenAIPromptExecutionSettings settings)
+ {
+ return settings;
+ }
+
+ var json = JsonSerializer.Serialize(executionSettings);
+
+ var openAIExecutionSettings = JsonSerializer.Deserialize<AzureOpenAIPromptExecutionSettings>(json, JsonOptionsCache.ReadPermissive);
+ if (openAIExecutionSettings is not null)
+ {
+ return openAIExecutionSettings;
+ }
+
+ throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(AzureOpenAIPromptExecutionSettings)}", nameof(executionSettings));
+ }
+
+ /// <summary>
+ /// Create a new settings object with the values from another settings object.
+ /// </summary>
+ /// <param name="executionSettings">Template configuration</param>
+ /// <param name="defaultMaxTokens">Default max tokens</param>
+ /// <returns>An instance of <see cref="AzureOpenAIPromptExecutionSettings"/></returns>
+ [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")]
+ public static AzureOpenAIPromptExecutionSettings FromExecutionSettingsWithData(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null)
+ {
+ var settings = FromExecutionSettings(executionSettings, defaultMaxTokens);
+
+ if (settings.StopSequences?.Count == 0)
+ {
+ // Azure OpenAI WithData API does not allow to send empty array of stop sequences
+ // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0"
+ settings.StopSequences = null;
+ }
+
+ return settings;
+ }
+
+ #region private ================================================================================
+
+ private double _temperature = 1;
+ private double _topP = 1;
+ private double _presencePenalty;
+ private double _frequencyPenalty;
+ private int? _maxTokens;
+ private IList<string>? _stopSequences;
+ private int _resultsPerPrompt = 1;
+ private long? _seed;
+ private object? _responseFormat;
+ private IDictionary<int, int>? _tokenSelectionBiases;
+ private AzureToolCallBehavior? _toolCallBehavior;
+ private string? _user;
+ private string? _chatSystemPrompt;
+ private bool? _logprobs;
+ private int? _topLogprobs;
+ private AzureChatExtensionsOptions? _azureChatExtensionsOptions;
+
+ #endregion
+}
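
FromExecutionSettings normalizes a generic PromptExecutionSettings into the Azure-specific type by round-tripping it through JSON, which is why the properties above carry JsonPropertyName attributes. A minimal sketch:

using System;
using System.Collections.Generic;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;

// Generic settings, e.g. as loaded from a prompt template's JSON.
var generic = new PromptExecutionSettings
{
    ExtensionData = new Dictionary<string, object> { ["temperature"] = 0.2 }
};

// Round-tripped through JSON into the connector-specific type; extension data
// keys map onto the strongly typed properties via their JSON names.
AzureOpenAIPromptExecutionSettings settings =
    AzureOpenAIPromptExecutionSettings.FromExecutionSettings(generic);

Console.WriteLine(settings.Temperature); // 0.2
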
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs
new file mode 100644
index 000000000000..4c3baef49268
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs
@@ -0,0 +1,269 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Diagnostics;
+using System.Linq;
+using System.Text.Json;
+using Azure.AI.OpenAI;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+/// <summary>Represents a behavior for Azure OpenAI tool calls.</summary>
+public abstract class AzureToolCallBehavior
+{
+ // NOTE: Right now, the only tools that are available are for function calling. In the future,
+ // this class can be extended to support additional kinds of tools, including composite ones:
+ // the OpenAIPromptExecutionSettings has a single ToolCallBehavior property, but we could
+ // expose a `public static ToolCallBehavior Composite(params ToolCallBehavior[] behaviors)`
+ // or the like to allow multiple distinct tools to be provided, should that be appropriate.
+ // We can also consider additional forms of tools, such as ones that dynamically examine
+ // the Kernel, KernelArguments, etc., and dynamically contribute tools to the ChatCompletionsOptions.
+
+ /// <summary>
+ /// The default maximum number of tool-call auto-invokes that can be made in a single request.
+ /// </summary>
+ /// <remarks>
+ /// After this number of iterations as part of a single user request is reached, auto-invocation
+ /// will be disabled (e.g. <see cref="AutoInvokeKernelFunctions"/> will behave like <see cref="EnableKernelFunctions"/>).
+ /// This is a safeguard against possible runaway execution if the model routinely re-requests
+ /// the same function over and over. It is currently hardcoded, but in the future it could
+ /// be made configurable by the developer. Other configuration is also possible in the future,
+ /// such as a delegate on the instance that can be invoked upon function call failure (e.g. failure
+ /// to find the requested function, failure to invoke the function, etc.), with behaviors for
+ /// what to do in such a case, e.g. respond to the model telling it to try again. With parallel tool call
+ /// support, where the model can request multiple tools in a single response, it is significantly
+ /// less likely that this limit is reached, as most of the time only a single request is needed.
+ /// </remarks>
+ private const int DefaultMaximumAutoInvokeAttempts = 128;
+
+ /// <summary>
+ /// Gets an instance that will provide all of the <see cref="Kernel"/>'s plugins' function information.
+ /// Function call requests from the model will be propagated back to the caller.
+ /// </summary>
+ /// <remarks>
+ /// If no <see cref="Kernel"/> is available, no function information will be provided to the model.
+ /// </remarks>
+ public static AzureToolCallBehavior EnableKernelFunctions { get; } = new KernelFunctions(autoInvoke: false);
+
+ /// <summary>
+ /// Gets an instance that will both provide all of the <see cref="Kernel"/>'s plugins' function information
+ /// to the model and attempt to automatically handle any function call requests.
+ /// </summary>
+ /// <remarks>
+ /// When successful, tool call requests from the model become an implementation detail, with the service
+ /// handling invoking any requested functions and supplying the results back to the model.
+ /// If no <see cref="Kernel"/> is available, no function information will be provided to the model.
+ /// </remarks>
+ public static AzureToolCallBehavior AutoInvokeKernelFunctions { get; } = new KernelFunctions(autoInvoke: true);
+
+ /// <summary>Gets an instance that will provide the specified list of functions to the model.</summary>
+ /// <param name="functions">The functions that should be made available to the model.</param>
+ /// <param name="autoInvoke">true to attempt to automatically handle function call requests; otherwise, false.</param>
+ /// <returns>
+ /// The <see cref="AzureToolCallBehavior"/> that may be set into <see cref="AzureOpenAIPromptExecutionSettings.ToolCallBehavior"/>
+ /// to indicate that the specified functions should be made available to the model.
+ /// </returns>
+ public static AzureToolCallBehavior EnableFunctions(IEnumerable functions, bool autoInvoke = false)
+ {
+ Verify.NotNull(functions);
+ return new EnabledFunctions(functions, autoInvoke);
+ }
+
+ /// <summary>Gets an instance that will request the model to use the specified function.</summary>
+ /// <param name="function">The function the model should request to use.</param>
+ /// <param name="autoInvoke">true to attempt to automatically handle function call requests; otherwise, false.</param>
+ /// <returns>
+ /// The <see cref="AzureToolCallBehavior"/> that may be set into <see cref="AzureOpenAIPromptExecutionSettings.ToolCallBehavior"/>
+ /// to indicate that the specified function should be requested by the model.
+ /// </returns>
+ public static AzureToolCallBehavior RequireFunction(AzureOpenAIFunction function, bool autoInvoke = false)
+ {
+ Verify.NotNull(function);
+ return new RequiredFunction(function, autoInvoke);
+ }
+
+ /// <summary>Initializes the instance; prevents external instantiation.</summary>
+ private AzureToolCallBehavior(bool autoInvoke)
+ {
+ this.MaximumAutoInvokeAttempts = autoInvoke ? DefaultMaximumAutoInvokeAttempts : 0;
+ }
+
+ /// <summary>
+ /// Options to control tool call result serialization behavior.
+ /// </summary>
+ [Obsolete("This property is deprecated in favor of Kernel.SerializerOptions that will be introduced in one of the following releases.")]
+ [EditorBrowsable(EditorBrowsableState.Never)]
+ public virtual JsonSerializerOptions? ToolCallResultSerializerOptions { get; set; }
+
+ /// <summary>Gets how many requests that are part of a single interaction should include this tool in the request.</summary>
+ /// <remarks>
+ /// This should be greater than or equal to <see cref="MaximumAutoInvokeAttempts"/>. It defaults to <see cref="int.MaxValue"/>.
+ /// Once this limit is reached, the tools will no longer be included in subsequent retries as part of the operation, e.g.
+ /// if this is 1, the first request will include the tools, but the subsequent response sending back the tool's result
+ /// will not include the tools for further use.
+ /// </remarks>
+ internal virtual int MaximumUseAttempts => int.MaxValue;
+
+ /// <summary>Gets how many tool call request/response roundtrips are supported with auto-invocation.</summary>
+ /// <remarks>
+ /// To disable auto invocation, this can be set to 0.
+ /// </remarks>
+ internal int MaximumAutoInvokeAttempts { get; }
+
+ /// <summary>
+ /// Gets whether validation against a specified list is required before allowing the model to request a function from the kernel.
+ /// </summary>
+ /// <value>true if it's ok to invoke any kernel function requested by the model if it's found; false if a request needs to be validated against an allow list.</value>
+ internal virtual bool AllowAnyRequestedKernelFunction => false;
+
+ /// <summary>Configures the <see cref="ChatCompletionsOptions"/> with any tools this <see cref="AzureToolCallBehavior"/> provides.</summary>
+ /// <param name="kernel">The <see cref="Kernel"/> used for the operation. This can be queried to determine what tools to provide into the <paramref name="options"/>.</param>
+ /// <param name="options">The destination <see cref="ChatCompletionsOptions"/> to configure.</param>
+ internal abstract void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options);
+
+ /// <summary>
+ /// Represents a <see cref="AzureToolCallBehavior"/> that will provide to the model all available functions from a
+ /// <see cref="Kernel"/> provided by the client. Setting this will have no effect if no <see cref="Kernel"/> is provided.
+ /// </summary>
+ internal sealed class KernelFunctions : AzureToolCallBehavior
+ {
+ internal KernelFunctions(bool autoInvoke) : base(autoInvoke) { }
+
+ public override string ToString() => $"{nameof(KernelFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0})";
+
+ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options)
+ {
+ // If no kernel is provided, we don't have any tools to provide.
+ if (kernel is not null)
+ {
+ // Provide all functions from the kernel.
+ IList<KernelFunctionMetadata> functions = kernel.Plugins.GetFunctionsMetadata();
+ if (functions.Count > 0)
+ {
+ options.ToolChoice = ChatCompletionsToolChoice.Auto;
+ for (int i = 0; i < functions.Count; i++)
+ {
+ options.Tools.Add(new ChatCompletionsFunctionToolDefinition(functions[i].ToAzureOpenAIFunction().ToFunctionDefinition()));
+ }
+ }
+ }
+ }
+
+ internal override bool AllowAnyRequestedKernelFunction => true;
+ }
+
+ /// <summary>
+ /// Represents a <see cref="AzureToolCallBehavior"/> that provides a specified list of functions to the model.
+ /// </summary>
+ internal sealed class EnabledFunctions : AzureToolCallBehavior
+ {
+ private readonly AzureOpenAIFunction[] _openAIFunctions;
+ private readonly ChatCompletionsFunctionToolDefinition[] _functions;
+
+ public EnabledFunctions(IEnumerable<AzureOpenAIFunction> functions, bool autoInvoke) : base(autoInvoke)
+ {
+ this._openAIFunctions = functions.ToArray();
+
+ var defs = new ChatCompletionsFunctionToolDefinition[this._openAIFunctions.Length];
+ for (int i = 0; i < defs.Length; i++)
+ {
+ defs[i] = new ChatCompletionsFunctionToolDefinition(this._openAIFunctions[i].ToFunctionDefinition());
+ }
+ this._functions = defs;
+ }
+
+ public override string ToString() => $"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.Name))}";
+
+ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options)
+ {
+ AzureOpenAIFunction[] openAIFunctions = this._openAIFunctions;
+ ChatCompletionsFunctionToolDefinition[] functions = this._functions;
+ Debug.Assert(openAIFunctions.Length == functions.Length);
+
+ if (openAIFunctions.Length > 0)
+ {
+ bool autoInvoke = base.MaximumAutoInvokeAttempts > 0;
+
+ // If auto-invocation is specified, we need a kernel to be able to invoke the functions.
+ // Lack of a kernel is fatal: we don't want to tell the model we can handle the functions
+ // and then fail to do so, so we fail before we get to that point. This is an error
+ // on the consumer's part: if they specify auto-invocation with any functions, they must
+ // specify the kernel, and the kernel must contain those functions.
+ if (autoInvoke && kernel is null)
+ {
+ throw new KernelException($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.");
+ }
+
+ options.ToolChoice = ChatCompletionsToolChoice.Auto;
+ for (int i = 0; i < openAIFunctions.Length; i++)
+ {
+ // Make sure that if auto-invocation is specified, every enabled function can be found in the kernel.
+ if (autoInvoke)
+ {
+ Debug.Assert(kernel is not null);
+ AzureOpenAIFunction f = openAIFunctions[i];
+ if (!kernel!.Plugins.TryGetFunction(f.PluginName, f.FunctionName, out _))
+ {
+ throw new KernelException($"The specified {nameof(EnabledFunctions)} function {f.FullyQualifiedName} is not available in the kernel.");
+ }
+ }
+
+ // Add the function.
+ options.Tools.Add(functions[i]);
+ }
+ }
+ }
+ }
+
+ /// Represents a <see cref="AzureToolCallBehavior"/> that requests the model use a specific function.
+ internal sealed class RequiredFunction : AzureToolCallBehavior
+ {
+ private readonly AzureOpenAIFunction _function;
+ private readonly ChatCompletionsFunctionToolDefinition _tool;
+ private readonly ChatCompletionsToolChoice _choice;
+
+ public RequiredFunction(AzureOpenAIFunction function, bool autoInvoke) : base(autoInvoke)
+ {
+ this._function = function;
+ this._tool = new ChatCompletionsFunctionToolDefinition(function.ToFunctionDefinition());
+ this._choice = new ChatCompletionsToolChoice(this._tool);
+ }
+
+ public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.Name}";
+
+ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options)
+ {
+ bool autoInvoke = base.MaximumAutoInvokeAttempts > 0;
+
+ // If auto-invocation is specified, we need a kernel to be able to invoke the functions.
+ // Lack of a kernel is fatal: we don't want to tell the model we can handle the functions
+ // and then fail to do so, so we fail before we get to that point. This is an error
+ // on the consumer's part: if they specify auto-invocation with any functions, they must
+ // specify the kernel, and the kernel must contain those functions.
+ if (autoInvoke && kernel is null)
+ {
+ throw new KernelException($"Auto-invocation with {nameof(RequiredFunction)} is not supported when no kernel is provided.");
+ }
+
+ // Make sure that if auto-invocation is specified, the required function can be found in the kernel.
+ if (autoInvoke && !kernel!.Plugins.TryGetFunction(this._function.PluginName, this._function.FunctionName, out _))
+ {
+ throw new KernelException($"The specified {nameof(RequiredFunction)} function {this._function.FullyQualifiedName} is not available in the kernel.");
+ }
+
+ options.ToolChoice = this._choice;
+ options.Tools.Add(this._tool);
+ }
+
+ /// Gets how many requests within a single interaction should include this tool in the request.
+ ///
+ /// Unlike <see cref="KernelFunctions"/> and <see cref="EnabledFunctions"/>, this must use 1 as the maximum
+ /// use attempts. Otherwise, every call back to the model _requires_ it to invoke the function (as opposed
+ /// to allowing it), which means we end up doing the same work over and over until the maximum is reached.
+ /// Thus for "requires", we must send the tool information only once.
+ ///
+ internal override int MaximumUseAttempts => 1;
+ }
+}
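The behaviors above are exercised through ConfigureOptions. The following minimal sketch (not part of the patch; it assumes internal visibility, for instance from the unit-test project, and a kernel with one imported plugin) shows how each behavior shapes the outgoing request:

    // Sketch: how AzureToolCallBehavior variants populate ChatCompletionsOptions.
    var kernel = new Kernel();
    kernel.ImportPluginFromFunctions("time", new[]
    {
        KernelFunctionFactory.CreateFromMethod(() => DateTime.UtcNow.ToString("O"), "utc_now")
    });

    var options = new ChatCompletionsOptions();

    // KernelFunctions advertises every kernel function and lets the model pick (ToolChoice = Auto).
    new AzureToolCallBehavior.KernelFunctions(autoInvoke: true).ConfigureOptions(kernel, options);

    // RequiredFunction instead pins ToolChoice to a single tool; because every subsequent request
    // would force another invocation, its MaximumUseAttempts is 1 and the tool is sent only once.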
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs
new file mode 100644
index 000000000000..e478a301d947
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs
@@ -0,0 +1,102 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Services;
+using Microsoft.SemanticKernel.TextGeneration;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Azure OpenAI chat completion service.
+///
+public sealed class AzureOpenAIChatCompletionService : IChatCompletionService, ITextGenerationService
+{
+ /// Core implementation shared by Azure OpenAI clients.
+ private readonly AzureOpenAIClientCore _core;
+
+ ///
+ /// Create an instance of the connector with API key auth.
+ ///
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Custom <see cref="HttpClient"/> for HTTP requests.
+ /// The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.
+ public AzureOpenAIChatCompletionService(
+ string deploymentName,
+ string endpoint,
+ string apiKey,
+ string? modelId = null,
+ HttpClient? httpClient = null,
+ ILoggerFactory? loggerFactory = null)
+ {
+ this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService)));
+
+ this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+ }
+
+ ///
+ /// Create an instance of the connector with AAD auth.
+ ///
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc.
+ /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Custom <see cref="HttpClient"/> for HTTP requests.
+ /// The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.
+ public AzureOpenAIChatCompletionService(
+ string deploymentName,
+ string endpoint,
+ TokenCredential credentials,
+ string? modelId = null,
+ HttpClient? httpClient = null,
+ ILoggerFactory? loggerFactory = null)
+ {
+ this._core = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService)));
+ this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+ }
+
+ ///
+ /// Creates a new client instance using the specified <see cref="OpenAIClient"/>.
+ ///
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Custom <see cref="OpenAIClient"/>.
+ /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.
+ public AzureOpenAIChatCompletionService(
+ string deploymentName,
+ OpenAIClient openAIClient,
+ string? modelId = null,
+ ILoggerFactory? loggerFactory = null)
+ {
+ this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService)));
+ this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+ }
+
+ ///
+ public IReadOnlyDictionary<string, object?> Attributes => this._core.Attributes;
+
+ ///
+ public Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken);
+
+ ///
+ public IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken);
+
+ ///
+ public Task<IReadOnlyList<TextContent>> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken);
+
+ ///
+ public IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken);
+}
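A typical consumption pattern for the service above, as a sketch only; the deployment name and the key source are placeholders:

    // Sketch: chat completion via the API-key constructor.
    var service = new AzureOpenAIChatCompletionService(
        deploymentName: "gpt-4o", // hypothetical deployment
        endpoint: "https://contoso.openai.azure.com/",
        apiKey: Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY")!);

    var history = new ChatHistory();
    history.AddUserMessage("What is Semantic Kernel?");

    // Both IChatCompletionService and ITextGenerationService calls funnel into the same core.
    IReadOnlyList<ChatMessageContent> reply = await service.GetChatMessageContentsAsync(history);
    Console.WriteLine(reply[0].Content);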
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatHistoryExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatHistoryExtensions.cs
new file mode 100644
index 000000000000..23412f666e23
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatHistoryExtensions.cs
@@ -0,0 +1,70 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// Chat history extensions.
+///
+public static class ChatHistoryExtensions
+{
+ ///
+ /// Add a message to the chat history at the end of the streamed message
+ ///
+ /// Target chat history
+ /// list of streaming message contents
+ /// Returns the original streaming results with some message processing
+ [Experimental("SKEXP0010")]
+ public static async IAsyncEnumerable<StreamingChatMessageContent> AddStreamingMessageAsync(this ChatHistory chatHistory, IAsyncEnumerable<AzureOpenAIStreamingChatMessageContent> streamingMessageContents)
+ {
+ List<AzureOpenAIStreamingChatMessageContent> messageContents = [];
+
+ // Stream the response.
+ StringBuilder? contentBuilder = null;
+ Dictionary<int, string>? toolCallIdsByIndex = null;
+ Dictionary<int, string>? functionNamesByIndex = null;
+ Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex = null;
+ Dictionary<string, object?>? metadata = null;
+ AuthorRole? streamedRole = null;
+ string? streamedName = null;
+
+ await foreach (var chatMessage in streamingMessageContents.ConfigureAwait(false))
+ {
+ metadata ??= (Dictionary<string, object?>?)chatMessage.Metadata;
+
+ if (chatMessage.Content is { Length: > 0 } contentUpdate)
+ {
+ (contentBuilder ??= new()).Append(contentUpdate);
+ }
+
+ AzureOpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatMessage.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex);
+
+ // A streaming message is always expected to include at least one chunk carrying the role.
+ streamedRole ??= chatMessage.Role;
+ streamedName ??= chatMessage.AuthorName;
+
+ messageContents.Add(chatMessage);
+ yield return chatMessage;
+ }
+
+ if (messageContents.Count != 0)
+ {
+ var role = streamedRole ?? AuthorRole.Assistant;
+
+ chatHistory.Add(
+ new AzureOpenAIChatMessageContent(
+ role,
+ contentBuilder?.ToString() ?? string.Empty,
+ messageContents[0].ModelId!,
+ AzureOpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex),
+ metadata)
+ { AuthorName = streamedName });
+ }
+ }
+}
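In streaming scenarios the extension both re-yields each chunk and appends the assembled message. A sketch, assuming the stream yields AzureOpenAIStreamingChatMessageContent instances and that the OfType call is available via the System.Linq.Async package:

    // Sketch: echo chunks to the console while accumulating the final message.
    var stream = service.GetStreamingChatMessageContentsAsync(history)
        .OfType<AzureOpenAIStreamingChatMessageContent>();
    await foreach (var chunk in history.AddStreamingMessageAsync(stream))
    {
        Console.Write(chunk.Content);
    }
    // history now ends with the fully assembled assistant message, tool calls included.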
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
index 837dd5b3c1db..8e8f53594708 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
@@ -21,7 +21,7 @@
-
+
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs
new file mode 100644
index 000000000000..8cbecc909951
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs
@@ -0,0 +1,117 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Linq;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// AzureOpenAI specialized chat message content
+///
+public sealed class AzureOpenAIChatMessageContent : ChatMessageContent
+{
+ ///
+ /// Gets the metadata key for the name property.
+ ///
+ public static string ToolIdProperty => $"{nameof(ChatCompletionsToolCall)}.{nameof(ChatCompletionsToolCall.Id)}";
+
+ ///
+ /// Gets the metadata key for the list of <see cref="ChatCompletionsFunctionToolCall"/>.
+ ///
+ internal static string FunctionToolCallsProperty => $"{nameof(ChatResponseMessage)}.FunctionToolCalls";
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ internal AzureOpenAIChatMessageContent(ChatResponseMessage chatMessage, string modelId, IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(new AuthorRole(chatMessage.Role.ToString()), chatMessage.Content, modelId, chatMessage, System.Text.Encoding.UTF8, CreateMetadataDictionary(chatMessage.ToolCalls, metadata))
+ {
+ this.ToolCalls = chatMessage.ToolCalls;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ internal AzureOpenAIChatMessageContent(ChatRole role, string? content, string modelId, IReadOnlyList<ChatCompletionsToolCall> toolCalls, IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(new AuthorRole(role.ToString()), content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata))
+ {
+ this.ToolCalls = toolCalls;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ internal AzureOpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList<ChatCompletionsToolCall> toolCalls, IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(role, content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata))
+ {
+ this.ToolCalls = toolCalls;
+ }
+
+ ///
+ /// A list of the tools called by the model.
+ ///
+ public IReadOnlyList<ChatCompletionsToolCall> ToolCalls { get; }
+
+ ///
+ /// Retrieve the resulting function tool calls from the chat result.
+ ///
+ /// The list of <see cref="AzureOpenAIFunctionToolCall"/>s, or an empty list if no function was returned by the model.
+ public IReadOnlyList<AzureOpenAIFunctionToolCall> GetOpenAIFunctionToolCalls()
+ {
+ List<AzureOpenAIFunctionToolCall>? functionToolCallList = null;
+
+ foreach (var toolCall in this.ToolCalls)
+ {
+ if (toolCall is ChatCompletionsFunctionToolCall functionToolCall)
+ {
+ (functionToolCallList ??= []).Add(new AzureOpenAIFunctionToolCall(functionToolCall));
+ }
+ }
+
+ if (functionToolCallList is not null)
+ {
+ return functionToolCallList;
+ }
+
+ return [];
+ }
+
+ private static IReadOnlyDictionary<string, object?>? CreateMetadataDictionary(
+ IReadOnlyList<ChatCompletionsToolCall> toolCalls,
+ IReadOnlyDictionary<string, object?>? original)
+ {
+ // We only need to augment the metadata if there are any tool calls.
+ if (toolCalls.Count > 0)
+ {
+ Dictionary<string, object?> newDictionary;
+ if (original is null)
+ {
+ // There's no existing metadata to clone; just allocate a new dictionary.
+ newDictionary = new Dictionary<string, object?>(1);
+ }
+ else if (original is IDictionary<string, object?> origIDictionary)
+ {
+ // Efficiently clone the old dictionary to a new one.
+ newDictionary = new Dictionary<string, object?>(origIDictionary);
+ }
+ else
+ {
+ // There's metadata to clone but we have to do so one item at a time.
+ newDictionary = new Dictionary<string, object?>(original.Count + 1);
+ foreach (var kvp in original)
+ {
+ newDictionary[kvp.Key] = kvp.Value;
+ }
+ }
+
+ // Add the additional entry.
+ newDictionary.Add(FunctionToolCallsProperty, toolCalls.OfType<ChatCompletionsFunctionToolCall>().ToList());
+
+ return newDictionary;
+ }
+
+ return original;
+ }
+}
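Tool calls surfaced on a response can be inspected as follows (sketch; it reuses the reply variable from the earlier service example):

    // Sketch: reading parsed function tool calls off a chat response.
    if (reply[0] is AzureOpenAIChatMessageContent azureMessage)
    {
        foreach (AzureOpenAIFunctionToolCall call in azureMessage.GetOpenAIFunctionToolCalls())
        {
            Console.WriteLine($"{call.FullyQualifiedName}: {call.Arguments?.Count ?? 0} argument(s)");
        }
        // The raw calls are also mirrored into Metadata under FunctionToolCallsProperty.
    }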
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs
new file mode 100644
index 000000000000..e34b191a83b8
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs
@@ -0,0 +1,102 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Net.Http;
+using Azure;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Services;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Core implementation for Azure OpenAI clients, providing common functionality and properties.
+///
+internal sealed class AzureOpenAIClientCore : ClientCore
+{
+ ///
+ /// Gets the key used to store the deployment name in the <see cref="ClientCore.Attributes"/> dictionary.
+ ///
+ public static string DeploymentNameKey => "DeploymentName";
+
+ ///
+ /// OpenAI / Azure OpenAI Client
+ ///
+ internal override OpenAIClient Client { get; }
+
+ ///
+ /// Initializes a new instance of the class using API Key authentication.
+ ///
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Custom <see cref="HttpClient"/> for HTTP requests.
+ /// The <see cref="ILogger"/> to use for logging. If null, no logging will be performed.
+ internal AzureOpenAIClientCore(
+ string deploymentName,
+ string endpoint,
+ string apiKey,
+ HttpClient? httpClient = null,
+ ILogger? logger = null) : base(logger)
+ {
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNullOrWhiteSpace(endpoint);
+ Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'");
+ Verify.NotNullOrWhiteSpace(apiKey);
+
+ var options = GetOpenAIClientOptions(httpClient);
+
+ this.DeploymentOrModelName = deploymentName;
+ this.Endpoint = new Uri(endpoint);
+ this.Client = new OpenAIClient(this.Endpoint, new AzureKeyCredential(apiKey), options);
+ }
+
+ ///
+ /// Initializes a new instance of the class supporting AAD authentication.
+ ///
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc.
+ /// Custom <see cref="HttpClient"/> for HTTP requests.
+ /// The <see cref="ILogger"/> to use for logging. If null, no logging will be performed.
+ internal AzureOpenAIClientCore(
+ string deploymentName,
+ string endpoint,
+ TokenCredential credential,
+ HttpClient? httpClient = null,
+ ILogger? logger = null) : base(logger)
+ {
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNullOrWhiteSpace(endpoint);
+ Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'");
+
+ var options = GetOpenAIClientOptions(httpClient);
+
+ this.DeploymentOrModelName = deploymentName;
+ this.Endpoint = new Uri(endpoint);
+ this.Client = new OpenAIClient(this.Endpoint, credential, options);
+ }
+
+ ///
+ /// Initializes a new instance of the class using the specified OpenAIClient.
+ /// Note: instances created this way might not have the default diagnostics settings,
+ /// it's up to the caller to configure the client.
+ ///
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Custom <see cref="OpenAIClient"/>.
+ /// The <see cref="ILogger"/> to use for logging. If null, no logging will be performed.
+ internal AzureOpenAIClientCore(
+ string deploymentName,
+ OpenAIClient openAIClient,
+ ILogger? logger = null) : base(logger)
+ {
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNull(openAIClient);
+
+ this.DeploymentOrModelName = deploymentName;
+ this.Client = openAIClient;
+
+ this.AddAttribute(DeploymentNameKey, deploymentName);
+ }
+}
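The three constructors correspond to three authentication paths. A sketch (the class is internal, so this assumes test-level visibility; existingOpenAIClient is a placeholder for a preconfigured client, and DefaultAzureCredential comes from Azure.Identity):

    // Sketch: API key, AAD, and bring-your-own-client construction.
    var withKey    = new AzureOpenAIClientCore("gpt-4o", "https://contoso.openai.azure.com/", apiKey);
    var withAad    = new AzureOpenAIClientCore("gpt-4o", "https://contoso.openai.azure.com/", new DefaultAzureCredential());
    var withClient = new AzureOpenAIClientCore("gpt-4o", existingOpenAIClient); // caller configures diagnostics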
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs
new file mode 100644
index 000000000000..4a3cff49103d
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs
@@ -0,0 +1,178 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using Azure.AI.OpenAI;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Represents a function parameter that can be passed to an AzureOpenAI function tool call.
+///
+public sealed class AzureOpenAIFunctionParameter
+{
+ internal AzureOpenAIFunctionParameter(string? name, string? description, bool isRequired, Type? parameterType, KernelJsonSchema? schema)
+ {
+ this.Name = name ?? string.Empty;
+ this.Description = description ?? string.Empty;
+ this.IsRequired = isRequired;
+ this.ParameterType = parameterType;
+ this.Schema = schema;
+ }
+
+ /// Gets the name of the parameter.
+ public string Name { get; }
+
+ /// Gets a description of the parameter.
+ public string Description { get; }
+
+ /// Gets whether the parameter is required vs optional.
+ public bool IsRequired { get; }
+
+ /// Gets the <see cref="Type"/> of the parameter, if known.
+ public Type? ParameterType { get; }
+
+ /// Gets a JSON schema for the parameter, if known.
+ public KernelJsonSchema? Schema { get; }
+}
+
+///
+/// Represents a function return parameter that can be returned by a tool call to AzureOpenAI.
+///
+public sealed class AzureOpenAIFunctionReturnParameter
+{
+ internal AzureOpenAIFunctionReturnParameter(string? description, Type? parameterType, KernelJsonSchema? schema)
+ {
+ this.Description = description ?? string.Empty;
+ this.Schema = schema;
+ this.ParameterType = parameterType;
+ }
+
+ /// Gets a description of the return parameter.
+ public string Description { get; }
+
+ /// Gets the <see cref="Type"/> of the return parameter, if known.
+ public Type? ParameterType { get; }
+
+ /// Gets a JSON schema for the return parameter, if known.
+ public KernelJsonSchema? Schema { get; }
+}
+
+///
+/// Represents a function that can be passed to the AzureOpenAI API
+///
+public sealed class AzureOpenAIFunction
+{
+ ///
+ /// Cached <see cref="BinaryData"/> storing the JSON for a function with no parameters.
+ ///
+ ///
+ /// This is an optimization to avoid serializing the same JSON Schema over and over again
+ /// for this relatively common case.
+ ///
+ private static readonly BinaryData s_zeroFunctionParametersSchema = new("""{"type":"object","required":[],"properties":{}}""");
+ ///
+ /// Cached schema for a descriptionless string.
+ ///
+ private static readonly KernelJsonSchema s_stringNoDescriptionSchema = KernelJsonSchema.Parse("""{"type":"string"}""");
+
+ /// Initializes the AzureOpenAIFunction.
+ internal AzureOpenAIFunction(
+ string? pluginName,
+ string functionName,
+ string? description,
+ IReadOnlyList? parameters,
+ AzureOpenAIFunctionReturnParameter? returnParameter)
+ {
+ Verify.NotNullOrWhiteSpace(functionName);
+
+ this.PluginName = pluginName;
+ this.FunctionName = functionName;
+ this.Description = description;
+ this.Parameters = parameters;
+ this.ReturnParameter = returnParameter;
+ }
+
+ /// Gets the separator used between the plugin name and the function name, if a plugin name is present.
+ /// This separator was previously _, but has been changed to - to better align to the behavior elsewhere in SK and in response
+ /// to developers who want to use underscores in their function or plugin names. We plan to make this setting configurable in the future.
+ public static string NameSeparator { get; set; } = "-";
+
+ /// Gets the name of the plugin with which the function is associated, if any.
+ public string? PluginName { get; }
+
+ /// Gets the name of the function.
+ public string FunctionName { get; }
+
+ /// Gets the fully-qualified name of the function.
+ ///
+ /// This is the concatenation of the <see cref="PluginName"/> and the <see cref="FunctionName"/>,
+ /// separated by <see cref="NameSeparator"/>. If there is no <see cref="PluginName"/>, this is
+ /// the same as <see cref="FunctionName"/>.
+ ///
+ public string FullyQualifiedName =>
+ string.IsNullOrEmpty(this.PluginName) ? this.FunctionName : $"{this.PluginName}{NameSeparator}{this.FunctionName}";
+
+ /// Gets a description of the function.
+ public string? Description { get; }
+
+ /// Gets a list of parameters to the function, if any.
+ public IReadOnlyList? Parameters { get; }
+
+ /// Gets the return parameter of the function, if any.
+ public AzureOpenAIFunctionReturnParameter? ReturnParameter { get; }
+
+ ///
+ /// Converts the <see cref="AzureOpenAIFunction"/> representation to the Azure SDK's
+ /// <see cref="FunctionDefinition"/> representation.
+ ///
+ /// A <see cref="FunctionDefinition"/> containing all the function information.
+ public FunctionDefinition ToFunctionDefinition()
+ {
+ BinaryData resultParameters = s_zeroFunctionParametersSchema;
+
+ IReadOnlyList<AzureOpenAIFunctionParameter>? parameters = this.Parameters;
+ if (parameters is { Count: > 0 })
+ {
+ var properties = new Dictionary<string, KernelJsonSchema>();
+ var required = new List<string>();
+
+ for (int i = 0; i < parameters.Count; i++)
+ {
+ var parameter = parameters[i];
+ properties.Add(parameter.Name, parameter.Schema ?? GetDefaultSchemaForTypelessParameter(parameter.Description));
+ if (parameter.IsRequired)
+ {
+ required.Add(parameter.Name);
+ }
+ }
+
+ resultParameters = BinaryData.FromObjectAsJson(new
+ {
+ type = "object",
+ required,
+ properties,
+ });
+ }
+
+ return new FunctionDefinition
+ {
+ Name = this.FullyQualifiedName,
+ Description = this.Description,
+ Parameters = resultParameters,
+ };
+ }
+
+ /// Gets a <see cref="KernelJsonSchema"/> for a typeless parameter with the specified description, defaulting to typeof(string)
+ private static KernelJsonSchema GetDefaultSchemaForTypelessParameter(string? description)
+ {
+ // If there's a description, incorporate it.
+ if (!string.IsNullOrWhiteSpace(description))
+ {
+ return KernelJsonSchemaBuilder.Build(null, typeof(string), description);
+ }
+
+ // Otherwise, we can use a cached schema for a string with no description.
+ return s_stringNoDescriptionSchema;
+ }
+}
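To see what ToFunctionDefinition emits, consider a single-parameter function. A sketch; it relies on the ToAzureOpenAIFunction extension introduced later in this patch, and the function name is illustrative:

    // Sketch: converting a kernel function into an Azure SDK FunctionDefinition.
    AzureOpenAIFunction fn = KernelFunctionFactory
        .CreateFromMethod((string city) => $"Sunny in {city}", "get_weather", "Gets the weather")
        .Metadata.ToAzureOpenAIFunction();

    FunctionDefinition def = fn.ToFunctionDefinition();
    // def.Name == "get_weather" (no plugin, so no NameSeparator prefix);
    // def.Parameters holds {"type":"object","required":["city"],"properties":{"city":{...}}}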
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs
new file mode 100644
index 000000000000..bea73a474d37
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs
@@ -0,0 +1,170 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Text;
+using System.Text.Json;
+using Azure.AI.OpenAI;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Represents an AzureOpenAI function tool call with deserialized function name and arguments.
+///
+public sealed class AzureOpenAIFunctionToolCall
+{
+ private string? _fullyQualifiedFunctionName;
+
+ /// Initialize the <see cref="AzureOpenAIFunctionToolCall"/> from a <see cref="ChatCompletionsFunctionToolCall"/>.
+ internal AzureOpenAIFunctionToolCall(ChatCompletionsFunctionToolCall functionToolCall)
+ {
+ Verify.NotNull(functionToolCall);
+ Verify.NotNull(functionToolCall.Name);
+
+ string fullyQualifiedFunctionName = functionToolCall.Name;
+ string functionName = fullyQualifiedFunctionName;
+ string? arguments = functionToolCall.Arguments;
+ string? pluginName = null;
+
+ int separatorPos = fullyQualifiedFunctionName.IndexOf(AzureOpenAIFunction.NameSeparator, StringComparison.Ordinal);
+ if (separatorPos >= 0)
+ {
+ pluginName = fullyQualifiedFunctionName.AsSpan(0, separatorPos).Trim().ToString();
+ functionName = fullyQualifiedFunctionName.AsSpan(separatorPos + AzureOpenAIFunction.NameSeparator.Length).Trim().ToString();
+ }
+
+ this.Id = functionToolCall.Id;
+ this._fullyQualifiedFunctionName = fullyQualifiedFunctionName;
+ this.PluginName = pluginName;
+ this.FunctionName = functionName;
+ if (!string.IsNullOrWhiteSpace(arguments))
+ {
+ this.Arguments = JsonSerializer.Deserialize<Dictionary<string, object?>>(arguments!);
+ }
+ }
+
+ /// Gets the ID of the tool call.
+ public string? Id { get; }
+
+ /// Gets the name of the plugin with which this function is associated, if any.
+ public string? PluginName { get; }
+
+ /// Gets the name of the function.
+ public string FunctionName { get; }
+
+ /// Gets a name/value collection of the arguments to the function, if any.
+ public Dictionary<string, object?>? Arguments { get; }
+
+ /// Gets the fully-qualified name of the function.
+ ///
+ /// This is the concatenation of the <see cref="PluginName"/> and the <see cref="FunctionName"/>,
+ /// separated by <see cref="AzureOpenAIFunction.NameSeparator"/>. If there is no <see cref="PluginName"/>,
+ /// this is the same as <see cref="FunctionName"/>.
+ ///
+ public string FullyQualifiedName =>
+ this._fullyQualifiedFunctionName ??=
+ string.IsNullOrEmpty(this.PluginName) ? this.FunctionName : $"{this.PluginName}{AzureOpenAIFunction.NameSeparator}{this.FunctionName}";
+
+ ///
+ public override string ToString()
+ {
+ var sb = new StringBuilder(this.FullyQualifiedName);
+
+ sb.Append('(');
+ if (this.Arguments is not null)
+ {
+ string separator = "";
+ foreach (var arg in this.Arguments)
+ {
+ sb.Append(separator).Append(arg.Key).Append(':').Append(arg.Value);
+ separator = ", ";
+ }
+ }
+ sb.Append(')');
+
+ return sb.ToString();
+ }
+
+ ///
+ /// Tracks tooling updates from streaming responses.
+ ///
+ /// The tool call update to incorporate.
+ /// Lazily-initialized dictionary mapping indices to IDs.
+ /// Lazily-initialized dictionary mapping indices to names.
+ /// Lazily-initialized dictionary mapping indices to arguments.
+ internal static void TrackStreamingToolingUpdate(
+ StreamingToolCallUpdate? update,
+ ref Dictionary<int, string>? toolCallIdsByIndex,
+ ref Dictionary<int, string>? functionNamesByIndex,
+ ref Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex)
+ {
+ if (update is null)
+ {
+ // Nothing to track.
+ return;
+ }
+
+ // If we have an ID, ensure the index is being tracked. Even if it's not a function update,
+ // we want to keep track of it so we can send back an error.
+ if (update.Id is string id)
+ {
+ (toolCallIdsByIndex ??= [])[update.ToolCallIndex] = id;
+ }
+
+ if (update is StreamingFunctionToolCallUpdate ftc)
+ {
+ // Ensure we're tracking the function's name.
+ if (ftc.Name is string name)
+ {
+ (functionNamesByIndex ??= [])[ftc.ToolCallIndex] = name;
+ }
+
+ // Ensure we're tracking the function's arguments.
+ if (ftc.ArgumentsUpdate is string argumentsUpdate)
+ {
+ if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(ftc.ToolCallIndex, out StringBuilder? arguments))
+ {
+ functionArgumentBuildersByIndex[ftc.ToolCallIndex] = arguments = new();
+ }
+
+ arguments.Append(argumentsUpdate);
+ }
+ }
+ }
+
+ ///
+ /// Converts the data built up by <see cref="TrackStreamingToolingUpdate"/> into an array of <see cref="ChatCompletionsFunctionToolCall"/>s.
+ ///
+ /// Dictionary mapping indices to IDs.
+ /// Dictionary mapping indices to names.
+ /// Dictionary mapping indices to arguments.
+ internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(
+ ref Dictionary<int, string>? toolCallIdsByIndex,
+ ref Dictionary<int, string>? functionNamesByIndex,
+ ref Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex)
+ {
+ ChatCompletionsFunctionToolCall[] toolCalls = [];
+ if (toolCallIdsByIndex is { Count: > 0 })
+ {
+ toolCalls = new ChatCompletionsFunctionToolCall[toolCallIdsByIndex.Count];
+
+ int i = 0;
+ foreach (KeyValuePair<int, string> toolCallIndexAndId in toolCallIdsByIndex)
+ {
+ string? functionName = null;
+ StringBuilder? functionArguments = null;
+
+ functionNamesByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionName);
+ functionArgumentBuildersByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionArguments);
+
+ toolCalls[i] = new ChatCompletionsFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? string.Empty);
+ i++;
+ }
+
+ Debug.Assert(i == toolCalls.Length);
+ }
+
+ return toolCalls;
+ }
+}
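The two static helpers are designed to be used as a pair across a streamed response. A sketch (updates stands in for the SDK's update stream; the helpers are internal, so this assumes test-level visibility):

    // Sketch: accumulate streamed tool-call fragments, then materialize complete calls.
    Dictionary<int, string>? ids = null, names = null;
    Dictionary<int, StringBuilder>? args = null;

    await foreach (StreamingChatCompletionsUpdate update in updates)
    {
        AzureOpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdate, ref ids, ref names, ref args);
    }

    // Once the stream ends, stitch the fragments into complete tool calls.
    ChatCompletionsFunctionToolCall[] calls =
        AzureOpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(ref ids, ref names, ref args);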
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIKernelFunctionMetadataExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIKernelFunctionMetadataExtensions.cs
new file mode 100644
index 000000000000..30f796f82ae0
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIKernelFunctionMetadataExtensions.cs
@@ -0,0 +1,54 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Extensions for specific to the AzureOpenAI connector.
+///
+public static class AzureOpenAIKernelFunctionMetadataExtensions
+{
+ ///
+ /// Convert a to an .
+ ///
+ /// The object to convert.
+ /// An object.
+ public static AzureOpenAIFunction ToAzureOpenAIFunction(this KernelFunctionMetadata metadata)
+ {
+ IReadOnlyList<KernelParameterMetadata> metadataParams = metadata.Parameters;
+
+ var openAIParams = new AzureOpenAIFunctionParameter[metadataParams.Count];
+ for (int i = 0; i < openAIParams.Length; i++)
+ {
+ var param = metadataParams[i];
+
+ openAIParams[i] = new AzureOpenAIFunctionParameter(
+ param.Name,
+ GetDescription(param),
+ param.IsRequired,
+ param.ParameterType,
+ param.Schema);
+ }
+
+ return new AzureOpenAIFunction(
+ metadata.PluginName,
+ metadata.Name,
+ metadata.Description,
+ openAIParams,
+ new AzureOpenAIFunctionReturnParameter(
+ metadata.ReturnParameter.Description,
+ metadata.ReturnParameter.ParameterType,
+ metadata.ReturnParameter.Schema));
+
+ static string GetDescription(KernelParameterMetadata param)
+ {
+ if (InternalTypeConverter.ConvertToString(param.DefaultValue) is string stringValue && !string.IsNullOrEmpty(stringValue))
+ {
+ return $"{param.Description} (default value: {stringValue})";
+ }
+
+ return param.Description;
+ }
+ }
+}
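Note the default-value handling in GetDescription: defaults are folded into the description text rather than into the schema. A sketch, assuming the factory captures the lambda parameter's Description attribute and default value (requires using System.ComponentModel; get_units is illustrative):

    // Sketch: default values surface in the converted parameter description.
    KernelFunction f = KernelFunctionFactory.CreateFromMethod(
        ([Description("Unit system")] string units = "metric") => units, "get_units");

    AzureOpenAIFunction converted = f.Metadata.ToAzureOpenAIFunction();
    // converted.Parameters[0].Description is expected to end with "(default value: metric)"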
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs
new file mode 100644
index 000000000000..c667183f773c
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs
@@ -0,0 +1,62 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics.CodeAnalysis;
+using Azure.AI.OpenAI;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Extension methods for .
+///
+public static class AzureOpenAIPluginCollectionExtensions
+{
+ ///
+ /// Given an object, tries to retrieve the corresponding and populate with its parameters.
+ ///
+ /// The plugins.
+ /// The object.
+ /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise,
+ /// When this method returns, the arguments for the function; otherwise,
+ /// if the function was found; otherwise, .
+ public static bool TryGetFunctionAndArguments(
+ this IReadOnlyKernelPluginCollection plugins,
+ ChatCompletionsFunctionToolCall functionToolCall,
+ [NotNullWhen(true)] out KernelFunction? function,
+ out KernelArguments? arguments) =>
+ plugins.TryGetFunctionAndArguments(new AzureOpenAIFunctionToolCall(functionToolCall), out function, out arguments);
+
+ ///
+ /// Given an <see cref="AzureOpenAIFunctionToolCall"/> object, tries to retrieve the corresponding <see cref="KernelFunction"/> and populate <see cref="KernelArguments"/> with its parameters.
+ ///
+ /// The plugins.
+ /// The <see cref="AzureOpenAIFunctionToolCall"/> object.
+ /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise, <see langword="null"/>
+ /// When this method returns, the arguments for the function; otherwise, <see langword="null"/>
+ /// <see langword="true"/> if the function was found; otherwise, <see langword="false"/>.
+ public static bool TryGetFunctionAndArguments(
+ this IReadOnlyKernelPluginCollection plugins,
+ AzureOpenAIFunctionToolCall functionToolCall,
+ [NotNullWhen(true)] out KernelFunction? function,
+ out KernelArguments? arguments)
+ {
+ if (plugins.TryGetFunction(functionToolCall.PluginName, functionToolCall.FunctionName, out function))
+ {
+ // Add parameters to arguments
+ arguments = null;
+ if (functionToolCall.Arguments is not null)
+ {
+ arguments = [];
+ foreach (var parameter in functionToolCall.Arguments)
+ {
+ arguments[parameter.Key] = parameter.Value?.ToString();
+ }
+ }
+
+ return true;
+ }
+
+ // Function not found in collection
+ arguments = null;
+ return false;
+ }
+}
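Combined with the chat message type above, this closes the tool-call loop. A sketch (azureMessage and kernel come from the earlier examples):

    // Sketch: resolve each model-issued tool call and invoke it through the kernel.
    foreach (var toolCall in azureMessage.ToolCalls.OfType<ChatCompletionsFunctionToolCall>())
    {
        if (kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? fn, out KernelArguments? fnArgs))
        {
            FunctionResult result = await fn.InvokeAsync(kernel, fnArgs);
            Console.WriteLine(result.GetValue<string>());
        }
    }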
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs
new file mode 100644
index 000000000000..c1843b185f89
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs
@@ -0,0 +1,87 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Azure OpenAI specialized streaming chat message content.
+///
+///
+/// Represents a chat message content chunk that was streamed from the remote model.
+///
+public sealed class AzureOpenAIStreamingChatMessageContent : StreamingChatMessageContent
+{
+ ///
+ /// The reason why the completion finished.
+ ///
+ public CompletionsFinishReason? FinishReason { get; set; }
+
+ ///
+ /// Create a new instance of the class.
+ ///
+ /// Internal Azure SDK Message update representation
+ /// Index of the choice
+ /// The model ID used to generate the content
+ /// Additional metadata
+ internal AzureOpenAIStreamingChatMessageContent(
+ StreamingChatCompletionsUpdate chatUpdate,
+ int choiceIndex,
+ string modelId,
+ IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(
+ chatUpdate.Role.HasValue ? new AuthorRole(chatUpdate.Role.Value.ToString()) : null,
+ chatUpdate.ContentUpdate,
+ chatUpdate,
+ choiceIndex,
+ modelId,
+ Encoding.UTF8,
+ metadata)
+ {
+ this.ToolCallUpdate = chatUpdate.ToolCallUpdate;
+ this.FinishReason = chatUpdate?.FinishReason;
+ }
+
+ ///
+ /// Create a new instance of the class.
+ ///
+ /// Author role of the message
+ /// Content of the message
+ /// Tool call update
+ /// Completion finish reason
+ /// Index of the choice
+ /// The model ID used to generate the content
+ /// Additional metadata
+ internal AzureOpenAIStreamingChatMessageContent(
+ AuthorRole? authorRole,
+ string? content,
+ StreamingToolCallUpdate? toolCallUpdate = null,
+ CompletionsFinishReason? completionsFinishReason = null,
+ int choiceIndex = 0,
+ string? modelId = null,
+ IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(
+ authorRole,
+ content,
+ null,
+ choiceIndex,
+ modelId,
+ Encoding.UTF8,
+ metadata)
+ {
+ this.ToolCallUpdate = toolCallUpdate;
+ this.FinishReason = completionsFinishReason;
+ }
+
+ /// Gets any update information in the message about a tool call.
+ public StreamingToolCallUpdate? ToolCallUpdate { get; }
+
+ ///
+ public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString());
+
+ ///
+ public override string ToString() => this.Content ?? string.Empty;
+}
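Consumers can branch on FinishReason to detect when a streamed turn ends in tool calls rather than text. A sketch, reusing service and history from the earlier examples:

    // Sketch: watching a stream for a tool-call finish.
    await foreach (var update in service.GetStreamingChatMessageContentsAsync(history))
    {
        if (update is AzureOpenAIStreamingChatMessageContent azureUpdate)
        {
            Console.Write(azureUpdate.Content);
            if (azureUpdate.FinishReason == CompletionsFinishReason.ToolCalls)
            {
                // azureUpdate.ToolCallUpdate carried the incremental function-call fragments.
            }
        }
    }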
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingTextContent.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingTextContent.cs
new file mode 100644
index 000000000000..9d9497fd68d5
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingTextContent.cs
@@ -0,0 +1,51 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text;
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Azure OpenAI specialized streaming text content.
+///
+///
+/// Represents a text content chunk that was streamed from the remote model.
+///
+public sealed class AzureOpenAIStreamingTextContent : StreamingTextContent
+{
+ ///
+ /// Create a new instance of the class.
+ ///
+ /// Text update
+ /// Index of the choice
+ /// The model ID used to generate the content
+ /// Inner chunk object
+ /// Metadata information
+ internal AzureOpenAIStreamingTextContent(
+ string text,
+ int choiceIndex,
+ string modelId,
+ object? innerContentObject = null,
+ IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(
+ text,
+ choiceIndex,
+ modelId,
+ innerContentObject,
+ Encoding.UTF8,
+ metadata)
+ {
+ }
+
+ ///
+ public override byte[] ToByteArray()
+ {
+ return this.Encoding.GetBytes(this.ToString());
+ }
+
+ ///
+ public override string ToString()
+ {
+ return this.Text ?? string.Empty;
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
new file mode 100644
index 000000000000..dda7578da8ea
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
@@ -0,0 +1,1574 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Diagnostics.Metrics;
+using System.Linq;
+using System.Net.Http;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Azure.Core.Pipeline;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Diagnostics;
+using Microsoft.SemanticKernel.Http;
+
+#pragma warning disable CA2208 // Instantiate argument exceptions correctly
+
+namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+///
+/// Base class for AI clients that provides common functionality for interacting with OpenAI services.
+///
+internal abstract class ClientCore
+{
+ private const string ModelProvider = "openai";
+ private const int MaxResultsPerPrompt = 128;
+
+ ///
+ /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current
+ /// asynchronous chain of execution.
+ ///
+ ///
+ /// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that
+ /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself,
+ /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close
+ /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution.
+ /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in
+ /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that
+ /// prompt function could advertise itself as a candidate for auto-invocation. We don't want to outright block that,
+ /// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent
+ /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made
+ /// configurable should need arise.
+ ///
+ private const int MaxInflightAutoInvokes = 128;
+
+ /// Singleton tool used when tool call count drops to 0 but we need to supply tools to keep the service happy.
+ private static readonly ChatCompletionsFunctionToolDefinition s_nonInvocableFunctionTool = new() { Name = "NonInvocableTool" };
+
+ /// Tracking <see cref="AsyncLocal{Int32}"/> for <see cref="MaxInflightAutoInvokes"/>.
+ private static readonly AsyncLocal<int> s_inflightAutoInvokes = new();
+
+ internal ClientCore(ILogger? logger = null)
+ {
+ this.Logger = logger ?? NullLogger.Instance;
+ }
+
+ ///
+ /// Model Id or Deployment Name
+ ///
+ internal string DeploymentOrModelName { get; set; } = string.Empty;
+
+ ///
+ /// OpenAI / Azure OpenAI Client
+ ///
+ internal abstract OpenAIClient Client { get; }
+
+ internal Uri? Endpoint { get; set; } = null;
+
+ ///
+ /// Logger instance
+ ///
+ internal ILogger Logger { get; set; }
+
+ ///
+ /// Storage for AI service attributes.
+ ///
+ internal Dictionary<string, object?> Attributes { get; } = [];
+
+ ///
+ /// Instance of <see cref="Meter"/> for metrics.
+ ///
+ private static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI");
+
+ ///
+ /// Instance of <see cref="Counter{T}"/> to keep track of the number of prompt tokens used.
+ ///
+ private static readonly Counter<int> s_promptTokensCounter =
+ s_meter.CreateCounter<int>(
+ name: "semantic_kernel.connectors.openai.tokens.prompt",
+ unit: "{token}",
+ description: "Number of prompt tokens used");
+
+ ///
+ /// Instance of <see cref="Counter{T}"/> to keep track of the number of completion tokens used.
+ ///
+ private static readonly Counter<int> s_completionTokensCounter =
+ s_meter.CreateCounter<int>(
+ name: "semantic_kernel.connectors.openai.tokens.completion",
+ unit: "{token}",
+ description: "Number of completion tokens used");
+
+ ///
+ /// Instance of <see cref="Counter{T}"/> to keep track of the total number of tokens used.
+ ///
+ private static readonly Counter<int> s_totalTokensCounter =
+ s_meter.CreateCounter<int>(
+ name: "semantic_kernel.connectors.openai.tokens.total",
+ unit: "{token}",
+ description: "Number of tokens used");
+
+ ///
+ /// Creates completions for the prompt and settings.
+ ///
+ /// The prompt to complete.
+ /// Execution settings for the completion API.
+ /// The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.
+ /// The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.
+ /// Completions generated by the remote model
+ internal async Task<IReadOnlyList<TextContent>> GetTextResultsAsync(
+ string prompt,
+ PromptExecutionSettings? executionSettings,
+ Kernel? kernel,
+ CancellationToken cancellationToken = default)
+ {
+ AzureOpenAIPromptExecutionSettings textExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, AzureOpenAIPromptExecutionSettings.DefaultTextMaxTokens);
+
+ ValidateMaxTokens(textExecutionSettings.MaxTokens);
+
+ var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName);
+
+ Completions? responseData = null;
+ List<TextContent> responseContent;
+ using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings))
+ {
+ try
+ {
+ responseData = (await RunRequestAsync(() => this.Client.GetCompletionsAsync(options, cancellationToken)).ConfigureAwait(false)).Value;
+ if (responseData.Choices.Count == 0)
+ {
+ throw new KernelException("Text completions not found");
+ }
+ }
+ catch (Exception ex) when (activity is not null)
+ {
+ activity.SetError(ex);
+ if (responseData != null)
+ {
+ // Capture available metadata even if the operation failed.
+ activity
+ .SetResponseId(responseData.Id)
+ .SetPromptTokenUsage(responseData.Usage.PromptTokens)
+ .SetCompletionTokenUsage(responseData.Usage.CompletionTokens);
+ }
+ throw;
+ }
+
+ responseContent = responseData.Choices.Select(choice => new TextContent(choice.Text, this.DeploymentOrModelName, choice, Encoding.UTF8, GetTextChoiceMetadata(responseData, choice))).ToList();
+ activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens);
+ }
+
+ this.LogUsage(responseData.Usage);
+
+ return responseContent;
+ }
+
+ internal async IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(
+ string prompt,
+ PromptExecutionSettings? executionSettings,
+ Kernel? kernel,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ AzureOpenAIPromptExecutionSettings textExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, AzureOpenAIPromptExecutionSettings.DefaultTextMaxTokens);
+
+ ValidateMaxTokens(textExecutionSettings.MaxTokens);
+
+ var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName);
+
+ using var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings);
+
+ StreamingResponse<Completions> response;
+ try
+ {
+ response = await RunRequestAsync(() => this.Client.GetCompletionsStreamingAsync(options, cancellationToken)).ConfigureAwait(false);
+ }
+ catch (Exception ex) when (activity is not null)
+ {
+ activity.SetError(ex);
+ throw;
+ }
+
+ var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator();
+ List<StreamingTextContent>? streamedContents = activity is not null ? [] : null;
+ try
+ {
+ while (true)
+ {
+ try
+ {
+ if (!await responseEnumerator.MoveNextAsync())
+ {
+ break;
+ }
+ }
+ catch (Exception ex) when (activity is not null)
+ {
+ activity.SetError(ex);
+ throw;
+ }
+
+ Completions completions = responseEnumerator.Current;
+ foreach (Choice choice in completions.Choices)
+ {
+ var openAIStreamingTextContent = new AzureOpenAIStreamingTextContent(
+ choice.Text, choice.Index, this.DeploymentOrModelName, choice, GetTextChoiceMetadata(completions, choice));
+ streamedContents?.Add(openAIStreamingTextContent);
+ yield return openAIStreamingTextContent;
+ }
+ }
+ }
+ finally
+ {
+ activity?.EndStreaming(streamedContents);
+ await responseEnumerator.DisposeAsync();
+ }
+ }
+
+ private static Dictionary<string, object?> GetTextChoiceMetadata(Completions completions, Choice choice)
+ {
+ return new Dictionary<string, object?>(8)
+ {
+ { nameof(completions.Id), completions.Id },
+ { nameof(completions.Created), completions.Created },
+ { nameof(completions.PromptFilterResults), completions.PromptFilterResults },
+ { nameof(completions.Usage), completions.Usage },
+ { nameof(choice.ContentFilterResults), choice.ContentFilterResults },
+
+ // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
+ { nameof(choice.FinishReason), choice.FinishReason?.ToString() },
+
+ { nameof(choice.LogProbabilityModel), choice.LogProbabilityModel },
+ { nameof(choice.Index), choice.Index },
+ };
+ }
+
+ private static Dictionary<string, object?> GetChatChoiceMetadata(ChatCompletions completions, ChatChoice chatChoice)
+ {
+ return new Dictionary<string, object?>(12)
+ {
+ { nameof(completions.Id), completions.Id },
+ { nameof(completions.Created), completions.Created },
+ { nameof(completions.PromptFilterResults), completions.PromptFilterResults },
+ { nameof(completions.SystemFingerprint), completions.SystemFingerprint },
+ { nameof(completions.Usage), completions.Usage },
+ { nameof(chatChoice.ContentFilterResults), chatChoice.ContentFilterResults },
+
+ // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
+ { nameof(chatChoice.FinishReason), chatChoice.FinishReason?.ToString() },
+
+ { nameof(chatChoice.FinishDetails), chatChoice.FinishDetails },
+ { nameof(chatChoice.LogProbabilityInfo), chatChoice.LogProbabilityInfo },
+ { nameof(chatChoice.Index), chatChoice.Index },
+ { nameof(chatChoice.Enhancements), chatChoice.Enhancements },
+ };
+ }
+
+ private static Dictionary<string, object?> GetResponseMetadata(StreamingChatCompletionsUpdate completions)
+ {
+ return new Dictionary<string, object?>(4)
+ {
+ { nameof(completions.Id), completions.Id },
+ { nameof(completions.Created), completions.Created },
+ { nameof(completions.SystemFingerprint), completions.SystemFingerprint },
+
+ // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
+ { nameof(completions.FinishReason), completions.FinishReason?.ToString() },
+ };
+ }
+
+ private static Dictionary<string, object?> GetResponseMetadata(AudioTranscription audioTranscription)
+ {
+ return new Dictionary<string, object?>(3)
+ {
+ { nameof(audioTranscription.Language), audioTranscription.Language },
+ { nameof(audioTranscription.Duration), audioTranscription.Duration },
+ { nameof(audioTranscription.Segments), audioTranscription.Segments }
+ };
+ }
+
+ ///
+ /// Generates an embedding from the given <paramref name="data"/>.
+ ///
+ /// List of strings to generate embeddings for
+ /// The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.
+ /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.
+ /// The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.
+ /// List of embeddings
+ internal async Task<IList<ReadOnlyMemory<float>>> GetEmbeddingsAsync(
+ IList<string> data,
+ Kernel? kernel,
+ int? dimensions,
+ CancellationToken cancellationToken)
+ {
+ var result = new List<ReadOnlyMemory<float>>(data.Count);
+
+ if (data.Count > 0)
+ {
+ var embeddingsOptions = new EmbeddingsOptions(this.DeploymentOrModelName, data)
+ {
+ Dimensions = dimensions
+ };
+
+ var response = await RunRequestAsync(() => this.Client.GetEmbeddingsAsync(embeddingsOptions, cancellationToken)).ConfigureAwait(false);
+ var embeddings = response.Value.Data;
+
+ if (embeddings.Count != data.Count)
+ {
+ throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}");
+ }
+
+ for (var i = 0; i < embeddings.Count; i++)
+ {
+ result.Add(embeddings[i].Embedding);
+ }
+ }
+
+ return result;
+ }
+
+ //internal async Task> GetTextContentFromAudioAsync(
+ // AudioContent content,
+ // PromptExecutionSettings? executionSettings,
+ // CancellationToken cancellationToken)
+ //{
+ // Verify.NotNull(content.Data);
+ // var audioData = content.Data.Value;
+ // if (audioData.IsEmpty)
+ // {
+ // throw new ArgumentException("Audio data cannot be empty", nameof(content));
+ // }
+
+ // OpenAIAudioToTextExecutionSettings? audioExecutionSettings = OpenAIAudioToTextExecutionSettings.FromExecutionSettings(executionSettings);
+
+ // Verify.ValidFilename(audioExecutionSettings?.Filename);
+
+ // var audioOptions = new AudioTranscriptionOptions
+ // {
+ // AudioData = BinaryData.FromBytes(audioData),
+ // DeploymentName = this.DeploymentOrModelName,
+ // Filename = audioExecutionSettings.Filename,
+ // Language = audioExecutionSettings.Language,
+ // Prompt = audioExecutionSettings.Prompt,
+ // ResponseFormat = audioExecutionSettings.ResponseFormat,
+ // Temperature = audioExecutionSettings.Temperature
+ // };
+
+ // AudioTranscription responseData = (await RunRequestAsync(() => this.Client.GetAudioTranscriptionAsync(audioOptions, cancellationToken)).ConfigureAwait(false)).Value;
+
+ // return [new(responseData.Text, this.DeploymentOrModelName, metadata: GetResponseMetadata(responseData))];
+ //}
+
+ ///
+ /// Generate a new chat message
+ ///
+ /// Chat history
+ /// Execution settings for the completion API.
+ /// The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.
+ /// Async cancellation token
+ /// Generated chat message contents
+ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(
+ ChatHistory chat,
+ PromptExecutionSettings? executionSettings,
+ Kernel? kernel,
+ CancellationToken cancellationToken = default)
+ {
+ Verify.NotNull(chat);
+
+ // Convert the incoming execution settings to OpenAI settings.
+ AzureOpenAIPromptExecutionSettings chatExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
+ bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes;
+ ValidateMaxTokens(chatExecutionSettings.MaxTokens);
+ ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt);
+
+ // Create the Azure SDK ChatCompletionOptions instance from all available information.
+ var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName);
+
+ for (int requestIndex = 1; ; requestIndex++)
+ {
+ // Make the request.
+ ChatCompletions? responseData = null;
+ List<ChatMessageContent> responseContent;
+ using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings))
+ {
+ try
+ {
+ responseData = (await RunRequestAsync(() => this.Client.GetChatCompletionsAsync(chatOptions, cancellationToken)).ConfigureAwait(false)).Value;
+ this.LogUsage(responseData.Usage);
+ if (responseData.Choices.Count == 0)
+ {
+ throw new KernelException("Chat completions not found");
+ }
+ }
+ catch (Exception ex) when (activity is not null)
+ {
+ activity.SetError(ex);
+ if (responseData != null)
+ {
+ // Capture available metadata even if the operation failed.
+ activity
+ .SetResponseId(responseData.Id)
+ .SetPromptTokenUsage(responseData.Usage.PromptTokens)
+ .SetCompletionTokenUsage(responseData.Usage.CompletionTokens);
+ }
+ throw;
+ }
+
+ responseContent = responseData.Choices.Select(chatChoice => this.GetChatMessage(chatChoice, responseData)).ToList();
+ activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens);
+ }
+
+ // If we don't want to attempt to invoke any functions, just return the result.
+ // Or if we are auto-invoking but we somehow end up with other than 1 choice even though only 1 was requested, similarly bail.
+ if (!autoInvoke || responseData.Choices.Count != 1)
+ {
+ return responseContent;
+ }
+
+ Debug.Assert(kernel is not null);
+
+ // Get our single result and extract the function call information. If this isn't a function call, or if it is
+ // but we're unable to find the function or extract the relevant information, just return the single result.
+ // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service
+ // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool
+ // is specified.
+ ChatChoice resultChoice = responseData.Choices[0];
+ AzureOpenAIChatMessageContent result = this.GetChatMessage(resultChoice, responseData);
+ if (result.ToolCalls.Count == 0)
+ {
+ return [result];
+ }
+
+ if (this.Logger.IsEnabled(LogLevel.Debug))
+ {
+ this.Logger.LogDebug("Tool requests: {Requests}", result.ToolCalls.Count);
+ }
+ if (this.Logger.IsEnabled(LogLevel.Trace))
+ {
+ this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", result.ToolCalls.OfType<ChatCompletionsFunctionToolCall>().Select(ftc => $"{ftc.Name}({ftc.Arguments})")));
+ }
+
+ // Add the original assistant message to the chatOptions; this is required for the service
+ // to understand the tool call responses. Also add the result message to the caller's chat
+ // history: if they don't want it, they can remove it, but this makes the data available,
+ // including metadata like usage.
+ chatOptions.Messages.Add(GetRequestMessage(resultChoice.Message));
+ chat.Add(result);
+
+ // We must send back a response for every tool call, regardless of whether we successfully executed it or not.
+ // If we successfully execute it, we'll add the result. If we don't, we'll add an error.
+ for (int toolCallIndex = 0; toolCallIndex < result.ToolCalls.Count; toolCallIndex++)
+ {
+ ChatCompletionsToolCall toolCall = result.ToolCalls[toolCallIndex];
+
+ // We currently only know about function tool calls. If it's anything else, we'll respond with an error.
+ if (toolCall is not ChatCompletionsFunctionToolCall functionToolCall)
+ {
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger);
+ continue;
+ }
+
+ // Parse the function call arguments.
+ AzureOpenAIFunctionToolCall? openAIFunctionToolCall;
+ try
+ {
+ openAIFunctionToolCall = new(functionToolCall);
+ }
+ catch (JsonException)
+ {
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger);
+ continue;
+ }
+
+ // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked,
+ // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able
+ // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check.
+ if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true &&
+ !IsRequestableTool(chatOptions, openAIFunctionToolCall))
+ {
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger);
+ continue;
+ }
+
+ // Find the function in the kernel and populate the arguments.
+ if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs))
+ {
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger);
+ continue;
+ }
+
+ // Now, invoke the function, and add the resulting tool call message to the chat options.
+ FunctionResult functionResult = new(function) { Culture = kernel.Culture };
+ AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat)
+ {
+ Arguments = functionArgs,
+ RequestSequenceIndex = requestIndex - 1,
+ FunctionSequenceIndex = toolCallIndex,
+ FunctionCount = result.ToolCalls.Count
+ };
+
+ s_inflightAutoInvokes.Value++;
+ try
+ {
+ invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) =>
+ {
+ // Check if filter requested termination.
+ if (context.Terminate)
+ {
+ return;
+ }
+
+ // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any
+ // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here,
+ // as the called function could in turn tell the model about itself as a possible candidate for invocation.
+ context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false);
+ }).ConfigureAwait(false);
+ }
+#pragma warning disable CA1031 // Do not catch general exception types
+ catch (Exception e)
+#pragma warning restore CA1031 // Do not catch general exception types
+ {
+ AddResponseMessage(chatOptions, chat, null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger);
+ continue;
+ }
+ finally
+ {
+ s_inflightAutoInvokes.Value--;
+ }
+
+ // Apply any changes from the auto function invocation filters context to final result.
+ functionResult = invocationContext.Result;
+
+ object functionResultValue = functionResult.GetValue<object>() ?? string.Empty;
@@ -44,6 +47,9 @@
     <None Include="TestData\text-embeddings-response.txt">
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </None>
+    <None Include="TestData\text-to-image-response.txt">
+      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+    </None>
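For orientation, the auto-invocation loop above only runs when the caller opts in through `ToolCallBehavior`. A minimal usage sketch, assuming an already-configured chat service and kernel (the settings type and behavior flag appear in the diff; the rest is illustrative):

    // Opt in to automatic tool invocation; this drives the `autoInvoke` flag
    // computed at the top of GetChatMessageContentsAsync.
    var settings = new AzureOpenAIPromptExecutionSettings
    {
        ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
    };

    var chat = new ChatHistory();
    chat.AddUserMessage("What is the weather in Paris?");

    // Function calls requested by the model are resolved against kernel plugins,
    // their results appended as tool messages, and the request retried.
    var reply = await chatService.GetChatMessageContentAsync(chat, settings, kernel);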
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs
index a3415663459a..f162e1d7334c 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/ClientCoreTests.cs
@@ -11,6 +11,7 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.Services;
using Moq;
using OpenAI;
using Xunit;
@@ -23,7 +24,7 @@ public void ItCanBeInstantiatedAndPropertiesSetAsExpected()
{
// Act
var logger = new Mock<ILogger<ClientCore>>().Object;
- var openAIClient = new OpenAIClient(new ApiKeyCredential("key"));
+ var openAIClient = new OpenAIClient("key");
var clientCoreModelConstructor = new ClientCore("model1", "apiKey");
var clientCoreOpenAIClientConstructor = new ClientCore("model1", openAIClient, logger: logger);
@@ -67,6 +68,8 @@ public void ItUsesEndpointAsExpected(string? clientBaseAddress, string? provided
// Assert
Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/v1"), clientCore.Endpoint);
+ Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.EndpointKey));
+ Assert.Equal(endpoint?.ToString() ?? client?.BaseAddress?.ToString() ?? "https://api.openai.com/v1", clientCore.Attributes[AIServiceExtensions.EndpointKey]);
client?.Dispose();
}
@@ -142,7 +145,7 @@ public async Task ItDoNotAddSemanticKernelHeadersWhenOpenAIClientIsProvidedAsync
var clientCore = new ClientCore(
modelId: "model",
openAIClient: new OpenAIClient(
- new ApiKeyCredential("test"),
+ "test",
new OpenAIClientOptions()
{
Transport = new HttpClientPipelineTransport(client),
@@ -185,4 +188,65 @@ public void ItAddAttributesButDoesNothingIfNullOrEmpty(string? value)
Assert.Equal(value, clientCore.Attributes["key"]);
}
}
+
+ [Fact]
+ public void ItAddModelIdAttributeAsExpected()
+ {
+ // Arrange
+ var expectedModelId = "modelId";
+
+ // Act
+ var clientCore = new ClientCore(expectedModelId, "apikey");
+ var clientCoreBreakingGlass = new ClientCore(expectedModelId, new OpenAIClient(" "));
+
+ // Assert
+ Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey));
+ Assert.True(clientCoreBreakingGlass.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey));
+ Assert.Equal(expectedModelId, clientCore.Attributes[AIServiceExtensions.ModelIdKey]);
+ Assert.Equal(expectedModelId, clientCoreBreakingGlass.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItAddOrNotOrganizationIdAttributeWhenProvided()
+ {
+ // Arrange
+ var expectedOrganizationId = "organizationId";
+
+ // Act
+ var clientCore = new ClientCore("modelId", "apikey", expectedOrganizationId);
+ var clientCoreWithoutOrgId = new ClientCore("modelId", "apikey");
+
+ // Assert
+ Assert.True(clientCore.Attributes.ContainsKey(ClientCore.OrganizationKey));
+ Assert.Equal(expectedOrganizationId, clientCore.Attributes[ClientCore.OrganizationKey]);
+ Assert.False(clientCoreWithoutOrgId.Attributes.ContainsKey(ClientCore.OrganizationKey));
+ }
+
+ [Fact]
+ public void ItThrowsIfModelIdIsNotProvided()
+ {
+ // Act & Assert
+ Assert.Throws(() => new ClientCore(" ", "apikey"));
+ Assert.Throws(() => new ClientCore("", "apikey"));
+ Assert.Throws(() => new ClientCore(null!));
+ }
+
+ [Fact]
+ public void ItThrowsWhenNotUsingCustomEndpointAndApiKeyIsNotProvided()
+ {
+ // Act & Assert
+ Assert.Throws(() => new ClientCore("modelId", " "));
+ Assert.Throws(() => new ClientCore("modelId", ""));
+ Assert.Throws(() => new ClientCore("modelId", apiKey: null!));
+ }
+
+ [Fact]
+ public void ItDoesNotThrowWhenUsingCustomEndpointAndApiKeyIsNotProvided()
+ {
+ // Act & Assert
+ ClientCore? clientCore = null;
+ clientCore = new ClientCore("modelId", " ", endpoint: new Uri("http://localhost"));
+ clientCore = new ClientCore("modelId", "", endpoint: new Uri("http://localhost"));
+ clientCore = new ClientCore("modelId", apiKey: null!, endpoint: new Uri("http://localhost"));
+ }
}
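The four tests above pin down `ClientCore`'s endpoint and API-key contract. A condensed sketch of that resolution logic, under the names used in this patch (illustrative, not the literal implementation):

    // Sketch of the endpoint/API-key rules exercised by the tests above.
    static Uri ResolveEndpoint(Uri? endpoint, HttpClient? httpClient, ref string? apiKey)
    {
        Uri? resolved = endpoint ?? httpClient?.BaseAddress;
        if (resolved is null)
        {
            // Default OpenAI endpoint: a real key is mandatory.
            Verify.NotNullOrWhiteSpace(apiKey);
            return new Uri("https://api.openai.com/v1");
        }

        // Custom endpoint: the OpenAI SDK rejects an empty key, so a single
        // space is substituted (https://github.com/openai/openai-dotnet/issues/90).
        if (string.IsNullOrEmpty(apiKey)) { apiKey = " "; }
        return resolved;
    }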
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/KernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/KernelBuilderExtensionsTests.cs
new file mode 100644
index 000000000000..f296000c5245
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/KernelBuilderExtensionsTests.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Services;
+using Microsoft.SemanticKernel.TextToImage;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions;
+
+public class KernelBuilderExtensionsTests
+{
+ [Fact]
+ public void ItCanAddTextEmbeddingGenerationService()
+ {
+ // Arrange
+ var sut = Kernel.CreateBuilder();
+
+ // Act
+ var service = sut.AddOpenAITextEmbeddingGeneration("model", "key")
+ .Build()
+ .GetRequiredService<ITextEmbeddingGenerationService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItCanAddTextEmbeddingGenerationServiceWithOpenAIClient()
+ {
+ // Arrange
+ var sut = Kernel.CreateBuilder();
+
+ // Act
+ var service = sut.AddOpenAITextEmbeddingGeneration("model", new OpenAIClient("key"))
+ .Build()
+ .GetRequiredService<ITextEmbeddingGenerationService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItCanAddTextToImageService()
+ {
+ // Arrange
+ var sut = Kernel.CreateBuilder();
+
+ // Act
+ var service = sut.AddOpenAITextToImage("model", "key")
+ .Build()
+ .GetRequiredService<ITextToImageService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItCanAddTextToImageServiceWithOpenAIClient()
+ {
+ // Arrange
+ var sut = Kernel.CreateBuilder();
+
+ // Act
+ var service = sut.AddOpenAITextToImage("model", new OpenAIClient("key"))
+ .Build()
+ .GetRequiredService<ITextToImageService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+}
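The happy-path usage these tests encode, gathered in one place (model names and key are illustrative):

    // Register both V2 services on a kernel and resolve them by interface.
    var kernel = Kernel.CreateBuilder()
        .AddOpenAITextEmbeddingGeneration("text-embedding-3-small", apiKey)
        .AddOpenAITextToImage("dall-e-3", apiKey)
        .Build();

    var embeddings = kernel.GetRequiredService<ITextEmbeddingGenerationService>();
    var images = kernel.GetRequiredService<ITextToImageService>();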
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs
new file mode 100644
index 000000000000..65db68eea180
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs
@@ -0,0 +1,74 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Services;
+using Microsoft.SemanticKernel.TextToImage;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions;
+
+public class ServiceCollectionExtensionsTests
+{
+ [Fact]
+ public void ItCanAddTextEmbeddingGenerationService()
+ {
+ // Arrange
+ var sut = new ServiceCollection();
+
+ // Act
+ var service = sut.AddOpenAITextEmbeddingGeneration("model", "key")
+ .BuildServiceProvider()
+ .GetRequiredService<ITextEmbeddingGenerationService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItCanAddTextEmbeddingGenerationServiceWithOpenAIClient()
+ {
+ // Arrange
+ var sut = new ServiceCollection();
+
+ // Act
+ var service = sut.AddOpenAITextEmbeddingGeneration("model", new OpenAIClient("key"))
+ .BuildServiceProvider()
+ .GetRequiredService<ITextEmbeddingGenerationService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItCanAddTextToImageService()
+ {
+ // Arrange
+ var sut = new ServiceCollection();
+
+ // Act
+ var service = sut.AddOpenAITextToImage("model", "key")
+ .BuildServiceProvider()
+ .GetRequiredService<ITextToImageService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void ItCanAddTextToImageServiceWithOpenAIClient()
+ {
+ // Arrange
+ var sut = new ServiceCollection();
+
+ // Act
+ var service = sut.AddOpenAITextToImage("model", new OpenAIClient("key"))
+ .BuildServiceProvider()
+ .GetRequiredService<ITextToImageService>();
+
+ // Assert
+ Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+}
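Because the extensions register keyed singletons, several models can coexist in one container and be picked by `serviceId`. Sketch (ids and model names illustrative):

    var services = new ServiceCollection();
    services.AddOpenAITextEmbeddingGeneration("text-embedding-3-small", apiKey, serviceId: "small");
    services.AddOpenAITextEmbeddingGeneration("text-embedding-3-large", apiKey, serviceId: "large");

    var provider = services.BuildServiceProvider();
    var large = provider.GetRequiredKeyedService<ITextEmbeddingGenerationService>("large");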
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs
index 25cdc4ec61aa..5fb36efc0349 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs
@@ -6,13 +6,19 @@
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Services;
+using Moq;
using OpenAI;
using Xunit;
namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services;
+
+/// <summary>
+/// Unit tests for <see cref="OpenAITextEmbeddingGenerationService"/> class.
+/// </summary>
public class OpenAITextEmbeddingGenerationServiceTests
{
[Fact]
@@ -43,8 +49,9 @@ public async Task ItGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsEmpty()
}
[Fact]
- public async Task IGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace()
+ public async Task GetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace()
{
+ // Arrange
using HttpMessageHandlerStub handler = new()
{
ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
@@ -54,7 +61,6 @@ public async Task IGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace()
};
using HttpClient client = new(handler);
- // Arrange
var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client);
// Act
@@ -68,6 +74,7 @@ public async Task IGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace()
[Fact]
public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync()
{
+ // Arrange
using HttpMessageHandlerStub handler = new()
{
ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
@@ -77,10 +84,38 @@ public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync()
};
using HttpClient client = new(handler);
- // Arrange
var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client);
// Act & Assert
await Assert.ThrowsAsync<KernelException>(async () => await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None));
}
+
+ [Fact]
+ public async Task GetEmbeddingsDoesLogActionAsync()
+ {
+ // Arrange
+ using HttpMessageHandlerStub handler = new()
+ {
+ ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt"))
+ }
+ };
+ using HttpClient client = new(handler);
+
+ var modelId = "dall-e-2";
+ var logger = new Mock<ILogger<OpenAITextEmbeddingGenerationService>>();
+ logger.Setup(l => l.IsEnabled(It.IsAny<LogLevel>())).Returns(true);
+
+ var mockLoggerFactory = new Mock<ILoggerFactory>();
+ mockLoggerFactory.Setup(x => x.CreateLogger(It.IsAny<string>())).Returns(logger.Object);
+
+ var sut = new OpenAITextEmbeddingGenerationService(modelId, "apiKey", httpClient: client, loggerFactory: mockLoggerFactory.Object);
+
+ // Act
+ await sut.GenerateEmbeddingsAsync(["description"]);
+
+ // Assert
+ logger.VerifyLog(LogLevel.Information, $"Action: {nameof(OpenAITextEmbeddingGenerationService.GenerateEmbeddingsAsync)}. OpenAI Model ID: {modelId}.", Times.Once());
+ }
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextToImageServiceTests.cs
new file mode 100644
index 000000000000..919b864327e8
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Services/OpenAITextToImageServiceTests.cs
@@ -0,0 +1,108 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.IO;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Services;
+using Moq;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.UnitTests.OpenAI.Services;
+
+/// <summary>
+/// Unit tests for <see cref="OpenAITextToImageService"/> class.
+/// </summary>
+public sealed class OpenAITextToImageServiceTests : IDisposable
+{
+ private readonly HttpMessageHandlerStub _messageHandlerStub;
+ private readonly HttpClient _httpClient;
+ private readonly Mock<ILoggerFactory> _mockLoggerFactory;
+
+ public OpenAITextToImageServiceTests()
+ {
+ this._messageHandlerStub = new()
+ {
+ ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StringContent(File.ReadAllText("./TestData/text-to-image-response.txt"))
+ }
+ };
+ this._httpClient = new HttpClient(this._messageHandlerStub, false);
+ this._mockLoggerFactory = new Mock<ILoggerFactory>();
+ }
+
+ [Fact]
+ public void ConstructorWorksCorrectly()
+ {
+ // Arrange & Act
+ var sut = new OpenAITextToImageService("model", "api-key", "organization");
+
+ // Assert
+ Assert.NotNull(sut);
+ Assert.Equal("organization", sut.Attributes[ClientCore.OrganizationKey]);
+ Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Fact]
+ public void OpenAIClientConstructorWorksCorrectly()
+ {
+ // Arrange
+ var sut = new OpenAITextToImageService("model", new OpenAIClient("apikey"));
+
+ // Assert
+ Assert.NotNull(sut);
+ Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]);
+ }
+
+ [Theory]
+ [InlineData(256, 256, "dall-e-2")]
+ [InlineData(512, 512, "dall-e-2")]
+ [InlineData(1024, 1024, "dall-e-2")]
+ [InlineData(1024, 1024, "dall-e-3")]
+ [InlineData(1024, 1792, "dall-e-3")]
+ [InlineData(1792, 1024, "dall-e-3")]
+ [InlineData(123, 321, "custom-model-1")]
+ [InlineData(179, 124, "custom-model-2")]
+ public async Task GenerateImageWorksCorrectlyAsync(int width, int height, string modelId)
+ {
+ // Arrange
+ var sut = new OpenAITextToImageService(modelId, "api-key", httpClient: this._httpClient);
+ Assert.Equal(modelId, sut.Attributes["ModelId"]);
+
+ // Act
+ var result = await sut.GenerateImageAsync("description", width, height);
+
+ // Assert
+ Assert.Equal("https://image-url/", result);
+ }
+
+ [Fact]
+ public async Task GenerateImageDoesLogActionAsync()
+ {
+ // Arrange
+ var modelId = "dall-e-2";
+ var logger = new Mock<ILogger<OpenAITextToImageService>>();
+ logger.Setup(l => l.IsEnabled(It.IsAny<LogLevel>())).Returns(true);
+
+ this._mockLoggerFactory.Setup(x => x.CreateLogger(It.IsAny<string>())).Returns(logger.Object);
+
+ var sut = new OpenAITextToImageService(modelId, "apiKey", httpClient: this._httpClient, loggerFactory: this._mockLoggerFactory.Object);
+
+ // Act
+ await sut.GenerateImageAsync("description", 256, 256);
+
+ // Assert
+ logger.VerifyLog(LogLevel.Information, $"Action: {nameof(OpenAITextToImageService.GenerateImageAsync)}. OpenAI Model ID: {modelId}.", Times.Once());
+ }
+
+ public void Dispose()
+ {
+ this._httpClient.Dispose();
+ this._messageHandlerStub.Dispose();
+ }
+}
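These tests depend on `HttpMessageHandlerStub`, a shared test utility not shown in this patch. A minimal stand-in that matches how it is used here (the repo's actual helper also captures the outgoing request for assertions):

    // Minimal illustrative stub: every request gets the canned response.
    internal sealed class HttpMessageHandlerStub : HttpMessageHandler
    {
        public HttpResponseMessage ResponseToReturn { get; set; } =
            new(System.Net.HttpStatusCode.OK);

        protected override Task<HttpResponseMessage> SendAsync(
            HttpRequestMessage request, CancellationToken cancellationToken)
            => Task.FromResult(this.ResponseToReturn);
    }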
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-to-image-response.txt b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-to-image-response.txt
new file mode 100644
index 000000000000..7d8f7327a5ec
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/TestData/text-to-image-response.txt
@@ -0,0 +1,8 @@
+{
+ "created": 1702575371,
+ "data": [
+ {
+ "url": "https://image-url/"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs
index d11e2799addd..aa15de012084 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.Embeddings.cs
@@ -13,8 +13,6 @@ This class was created to simplify any Text Embeddings Support from the v1 Clien
using System.Threading.Tasks;
using OpenAI.Embeddings;
-#pragma warning disable CA2208 // Instantiate argument exceptions correctly
-
namespace Microsoft.SemanticKernel.Connectors.OpenAI;
///
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.TextToImage.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.TextToImage.cs
new file mode 100644
index 000000000000..26d8480fd004
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.TextToImage.cs
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/*
+Phase 02
+
+- This class was created to focus on image generation using the SDK client instead of the custom client in V1.
+- Added a check for an empty or whitespace prompt.
+- Removed the format parameter, as it is never used in the V1 code; the plan is to implement it in the future once the ITextToImageService abstraction changes, using PromptExecutionSettings.
+- Allow custom image sizes when the endpoint is not the default OpenAI v1 endpoint.
+*/
+
+using System.ClientModel;
+using System.Threading;
+using System.Threading.Tasks;
+using OpenAI.Images;
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+///
+/// Base class for AI clients that provides common functionality for interacting with OpenAI services.
+///
+internal partial class ClientCore
+{
+ /// <summary>
+ /// Generates an image with the provided configuration.
+ /// </summary>
+ /// <param name="prompt">Prompt to generate the image</param>
+ /// <param name="width">Width of the image</param>
+ /// <param name="height">Height of the image</param>
+ /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+ /// <returns>Url of the generated image</returns>
+ internal async Task<string> GenerateImageAsync(
+ string prompt,
+ int width,
+ int height,
+ CancellationToken cancellationToken)
+ {
+ Verify.NotNullOrWhiteSpace(prompt);
+
+ var size = new GeneratedImageSize(width, height);
+
+ var imageOptions = new ImageGenerationOptions()
+ {
+ Size = size,
+ ResponseFormat = GeneratedImageFormat.Uri
+ };
+
+ ClientResult<GeneratedImage> response = await RunRequestAsync(() => this.Client.GetImageClient(this.ModelId).GenerateImageAsync(prompt, imageOptions, cancellationToken)).ConfigureAwait(false);
+ var generatedImage = response.Value;
+
+ return generatedImage.ImageUri?.ToString() ?? throw new KernelException("The generated image is not in url format");
+ }
+}
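Consumption sketch for the method above, through the public service added later in this patch (prompt and sizes illustrative; DALL-E 3 accepts 1024x1024, 1024x1792 and 1792x1024):

    ITextToImageService imageService = new OpenAITextToImageService("dall-e-3", apiKey);

    // Width/height map onto OpenAI's GeneratedImageSize; the returned value is
    // the image URL, or a KernelException is thrown if no URI comes back.
    string url = await imageService.GenerateImageAsync("A watercolor fox", 1024, 1024);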
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
index 12ca2f3d92fe..a6be6d20aa46 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
@@ -4,6 +4,11 @@
Phase 01 : This class was created adapting and merging ClientCore and OpenAIClientCore classes.
System.ClientModel changes were added and adapted to the code as this package is now used as a dependency over OpenAI package.
All logic from original ClientCore and OpenAIClientCore were preserved.
+
+Phase 02 :
+- Moved AddAttributes usage to the constructor, avoiding the need to verify and add it in each service.
+- Added the ModelId attribute to the OpenAIClient constructor.
+- Used a single whitespace instead of an empty string for ApiKey, to avoid an exception from the OpenAI client on custom endpoints; filed an issue in the OpenAI SDK repo: https://github.com/openai/openai-dotnet/issues/90
*/
using System;
@@ -17,6 +22,7 @@ All logic from original ClientCore and OpenAIClientCore were preserved.
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.Services;
using OpenAI;
#pragma warning disable CA2208 // Instantiate argument exceptions correctly
@@ -28,6 +34,16 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI;
///
internal partial class ClientCore
{
+ /// <summary>
+ /// White space constant.
+ /// </summary>
+ private const string SingleSpace = " ";
+
+ /// <summary>
+ /// Gets the attribute name used to store the organization in the <see cref="IAIService.Attributes"/> dictionary.
+ /// </summary>
+ internal const string OrganizationKey = "Organization";
+
/// <summary>
/// Default OpenAI API endpoint.
/// </summary>
@@ -63,15 +79,15 @@ internal partial class ClientCore
/// </summary>
/// <param name="modelId">Model name.</param>
/// <param name="apiKey">OpenAI API Key.</param>
- /// <param name="endpoint">OpenAI compatible API endpoint.</param>
/// <param name="organizationId">OpenAI Organization Id (usually optional).</param>
+ /// <param name="endpoint">OpenAI compatible API endpoint.</param>
/// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
/// <param name="logger">The <see cref="ILogger"/> to use for logging. If null, no logging will be performed.</param>
internal ClientCore(
string modelId,
string? apiKey = null,
- Uri? endpoint = null,
string? organizationId = null,
+ Uri? endpoint = null,
HttpClient? httpClient = null,
ILogger? logger = null)
{
@@ -80,6 +96,8 @@ internal ClientCore(
this.Logger = logger ?? NullLogger.Instance;
this.ModelId = modelId;
+ this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+
// Accepts the endpoint if provided, otherwise uses the default OpenAI endpoint.
this.Endpoint = endpoint ?? httpClient?.BaseAddress;
if (this.Endpoint is null)
@@ -87,14 +105,23 @@ internal ClientCore(
Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided.
this.Endpoint = new Uri(OpenAIV1Endpoint);
}
+ else if (string.IsNullOrEmpty(apiKey))
+ {
+ // Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
+ apiKey = SingleSpace;
+ }
+
+ this.AddAttribute(AIServiceExtensions.EndpointKey, this.Endpoint.ToString());
var options = GetOpenAIClientOptions(httpClient, this.Endpoint);
if (!string.IsNullOrWhiteSpace(organizationId))
{
options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organizationId!), PipelinePosition.PerCall);
+
+ this.AddAttribute(ClientCore.OrganizationKey, organizationId);
}
- this.Client = new OpenAIClient(apiKey ?? string.Empty, options);
+ this.Client = new OpenAIClient(apiKey!, options);
}
///
@@ -116,6 +143,8 @@ internal ClientCore(
this.Logger = logger ?? NullLogger.Instance;
this.ModelId = modelId;
this.Client = openAIClient;
+
+ this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
}
///
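With the attributes now set in the constructors, every V2 service self-describes its configuration. Reading them back (keys come from `AIServiceExtensions` plus the `OrganizationKey` constant above; values illustrative):

    var service = new OpenAITextEmbeddingGenerationService(
        "text-embedding-3-small", apiKey, organization: "org-123");

    var modelId = service.Attributes[AIServiceExtensions.ModelIdKey];   // "text-embedding-3-small"
    var endpoint = service.Attributes[AIServiceExtensions.EndpointKey]; // "https://api.openai.com/v1"
    var hasOrg = service.Attributes.ContainsKey("Organization");        // true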
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/OpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/OpenAIKernelBuilderExtensions.cs
new file mode 100644
index 000000000000..567d82726e4b
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/OpenAIKernelBuilderExtensions.cs
@@ -0,0 +1,152 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+using System.Net.Http;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.TextToImage;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// Sponsor extensions class for <see cref="IKernelBuilder"/>.
+///
+public static class OpenAIKernelBuilderExtensions
+{
+ #region Text Embedding
+ /// <summary>
+ /// Adds the OpenAI text embeddings service to the list.
+ /// </summary>
+ /// <param name="builder">The <see cref="IKernelBuilder"/> instance to augment.</param>
+ /// <param name="modelId">OpenAI model name, see https://platform.openai.com/docs/models</param>
+ /// <param name="apiKey">OpenAI API key, see https://platform.openai.com/account/api-keys</param>
+ /// <param name="orgId">OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="endpoint">Non-default endpoint for the OpenAI API.</param>
+ /// <param name="httpClient">The HttpClient to use with this service.</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ /// <returns>The same instance as <paramref name="builder"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IKernelBuilder AddOpenAITextEmbeddingGeneration(
+ this IKernelBuilder builder,
+ string modelId,
+ string apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ Uri? endpoint = null,
+ HttpClient? httpClient = null,
+ int? dimensions = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextEmbeddingGenerationService(
+ modelId,
+ apiKey,
+ orgId,
+ endpoint,
+ HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
+ serviceProvider.GetService<ILoggerFactory>(),
+ dimensions));
+
+ return builder;
+ }
+
+ /// <summary>
+ /// Adds the OpenAI text embeddings service to the list.
+ /// </summary>
+ /// <param name="builder">The <see cref="IKernelBuilder"/> instance to augment.</param>
+ /// <param name="modelId">OpenAI model name, see https://platform.openai.com/docs/models</param>
+ /// <param name="openAIClient"><see cref="OpenAIClient"/> to use for the service. If null, one must be available in the service provider when this service is resolved.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ /// <returns>The same instance as <paramref name="builder"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IKernelBuilder AddOpenAITextEmbeddingGeneration(
+ this IKernelBuilder builder,
+ string modelId,
+ OpenAIClient? openAIClient = null,
+ string? serviceId = null,
+ int? dimensions = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextEmbeddingGenerationService(
+ modelId,
+ openAIClient ?? serviceProvider.GetRequiredService<OpenAIClient>(),
+ serviceProvider.GetService<ILoggerFactory>(),
+ dimensions));
+
+ return builder;
+ }
+ #endregion
+
+ #region Text to Image
+ /// <summary>
+ /// Add the OpenAI Dall-E text to image service to the list
+ /// </summary>
+ /// <param name="builder">The <see cref="IKernelBuilder"/> instance to augment.</param>
+ /// <param name="modelId">OpenAI model name, see https://platform.openai.com/docs/models</param>
+ /// <param name="openAIClient"><see cref="OpenAIClient"/> to use for the service. If null, one must be available in the service provider when this service is resolved.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <returns>The same instance as <paramref name="builder"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IKernelBuilder AddOpenAITextToImage(
+ this IKernelBuilder builder,
+ string modelId,
+ OpenAIClient? openAIClient = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddKeyedSingleton<ITextToImageService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextToImageService(
+ modelId,
+ openAIClient ?? serviceProvider.GetRequiredService<OpenAIClient>(),
+ serviceProvider.GetService<ILoggerFactory>()));
+
+ return builder;
+ }
+
+ /// <summary>
+ /// Add the OpenAI Dall-E text to image service to the list
+ /// </summary>
+ /// <param name="builder">The <see cref="IKernelBuilder"/> instance to augment.</param>
+ /// <param name="modelId">The model to use for image generation.</param>
+ /// <param name="apiKey">OpenAI API key, see https://platform.openai.com/account/api-keys</param>
+ /// <param name="orgId">OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="endpoint">Non-default endpoint for the OpenAI API.</param>
+ /// <param name="httpClient">The HttpClient to use with this service.</param>
+ /// <returns>The same instance as <paramref name="builder"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IKernelBuilder AddOpenAITextToImage(
+ this IKernelBuilder builder,
+ string modelId,
+ string apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ Uri? endpoint = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddKeyedSingleton<ITextToImageService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextToImageService(
+ modelId,
+ apiKey,
+ orgId,
+ endpoint,
+ HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
+ serviceProvider.GetService<ILoggerFactory>()));
+
+ return builder;
+ }
+ #endregion
+}
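A note on the `openAIClient ?? serviceProvider.GetRequiredService<OpenAIClient>()` fallback used in both overloads: one SDK client can be registered once in DI and shared by every service. Sketch:

    var builder = Kernel.CreateBuilder();
    builder.Services.AddSingleton(new OpenAIClient(apiKey));

    // No client passed: each service resolves the shared singleton at build time.
    builder.AddOpenAITextEmbeddingGeneration("text-embedding-3-small");
    builder.AddOpenAITextToImage("dall-e-3");
    var kernel = builder.Build();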
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/OpenAIServiceCollectionExtensions.cs
new file mode 100644
index 000000000000..77355de7f24e
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Extensions/OpenAIServiceCollectionExtensions.cs
@@ -0,0 +1,146 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.TextToImage;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel;
+
+/* Phase 02
+- Add endpoint parameter for both Embedding and TextToImage service extensions.
+- Removed unnecessary validation checks (these already happen in the service/client constructors).
+- Added an openAIClient extension for the TextToImage service.
+- Changed the parameter order for the TextToImage service extension (modelId comes first).
+- Made modelId a required parameter of the TextToImage services.
+
+*/
+/// <summary>
+/// Sponsor extensions class for <see cref="IServiceCollection"/>.
+/// </summary>
+public static class OpenAIServiceCollectionExtensions
+{
+ #region Text Embedding
+ /// <summary>
+ /// Adds the OpenAI text embeddings service to the list.
+ /// </summary>
+ /// <param name="services">The <see cref="IServiceCollection"/> instance to augment.</param>
+ /// <param name="modelId">OpenAI model name, see https://platform.openai.com/docs/models</param>
+ /// <param name="apiKey">OpenAI API key, see https://platform.openai.com/account/api-keys</param>
+ /// <param name="orgId">OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ /// <param name="endpoint">Non-default endpoint for the OpenAI API.</param>
+ /// <returns>The same instance as <paramref name="services"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IServiceCollection AddOpenAITextEmbeddingGeneration(
+ this IServiceCollection services,
+ string modelId,
+ string apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ int? dimensions = null,
+ Uri? endpoint = null)
+ {
+ Verify.NotNull(services);
+
+ return services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextEmbeddingGenerationService(
+ modelId,
+ apiKey,
+ orgId,
+ endpoint,
+ HttpClientProvider.GetHttpClient(serviceProvider),
+ serviceProvider.GetService<ILoggerFactory>(),
+ dimensions));
+ }
+
+ /// <summary>
+ /// Adds the OpenAI text embeddings service to the list.
+ /// </summary>
+ /// <param name="services">The <see cref="IServiceCollection"/> instance to augment.</param>
+ /// <param name="modelId">The OpenAI model id.</param>
+ /// <param name="openAIClient"><see cref="OpenAIClient"/> to use for the service. If null, one must be available in the service provider when this service is resolved.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ /// <returns>The same instance as <paramref name="services"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IServiceCollection AddOpenAITextEmbeddingGeneration(this IServiceCollection services,
+ string modelId,
+ OpenAIClient? openAIClient = null,
+ string? serviceId = null,
+ int? dimensions = null)
+ {
+ Verify.NotNull(services);
+
+ return services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextEmbeddingGenerationService(
+ modelId,
+ openAIClient ?? serviceProvider.GetRequiredService<OpenAIClient>(),
+ serviceProvider.GetService<ILoggerFactory>(),
+ dimensions));
+ }
+ #endregion
+
+ #region Text to Image
+ /// <summary>
+ /// Add the OpenAI Dall-E text to image service to the list
+ /// </summary>
+ /// <param name="services">The <see cref="IServiceCollection"/> instance to augment.</param>
+ /// <param name="modelId">The model to use for image generation.</param>
+ /// <param name="apiKey">OpenAI API key, see https://platform.openai.com/account/api-keys</param>
+ /// <param name="orgId">OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="endpoint">Non-default endpoint for the OpenAI API.</param>
+ /// <returns>The same instance as <paramref name="services"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services,
+ string modelId,
+ string apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ Uri? endpoint = null)
+ {
+ Verify.NotNull(services);
+
+ return services.AddKeyedSingleton<ITextToImageService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextToImageService(
+ modelId,
+ apiKey,
+ orgId,
+ endpoint,
+ HttpClientProvider.GetHttpClient(serviceProvider),
+ serviceProvider.GetService<ILoggerFactory>()));
+ }
+
+ /// <summary>
+ /// Add the OpenAI Dall-E text to image service to the list
+ /// </summary>
+ /// <param name="services">The <see cref="IServiceCollection"/> instance to augment.</param>
+ /// <param name="modelId">The OpenAI model id.</param>
+ /// <param name="openAIClient"><see cref="OpenAIClient"/> to use for the service. If null, one must be available in the service provider when this service is resolved.</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
+ /// <returns>The same instance as <paramref name="services"/>.</returns>
+ [Experimental("SKEXP0010")]
+ public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services,
+ string modelId,
+ OpenAIClient? openAIClient = null,
+ string? serviceId = null,
+ int? dimensions = null)
+ {
+ Verify.NotNull(services);
+
+ return services.AddKeyedSingleton<ITextToImageService>(serviceId, (serviceProvider, _) =>
+ new OpenAITextToImageService(
+ modelId,
+ openAIClient ?? serviceProvider.GetRequiredService<OpenAIClient>(),
+ serviceProvider.GetService<ILoggerFactory>()));
+ }
+ #endregion
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs
index 49915031b7fc..a4dd48ba75e3 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextEmbbedingGenerationService.cs
@@ -8,11 +8,14 @@
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel.Embeddings;
-using Microsoft.SemanticKernel.Services;
using OpenAI;
namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+/* Phase 02
+Adding the non-default endpoint parameter to the constructor.
+*/
+
/// <summary>
/// OpenAI implementation of <see cref="ITextEmbeddingGenerationService"/>
/// </summary>
@@ -28,6 +31,7 @@ public sealed class OpenAITextEmbeddingGenerationService : ITextEmbeddingGenerat
/// <param name="modelId">Model name</param>
/// <param name="apiKey">OpenAI API Key</param>
/// <param name="organization">OpenAI Organization Id (usually optional)</param>
+ /// <param name="endpoint">Non-default endpoint for the OpenAI API</param>
/// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
/// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
/// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
@@ -35,6 +39,7 @@ public OpenAITextEmbeddingGenerationService(
string modelId,
string apiKey,
string? organization = null,
+ Uri? endpoint = null,
HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null,
int? dimensions = null)
@@ -42,12 +47,11 @@ public OpenAITextEmbeddingGenerationService(
this._core = new(
modelId: modelId,
apiKey: apiKey,
+ endpoint: endpoint,
organizationId: organization,
httpClient: httpClient,
logger: loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService)));
- this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
-
this._dimensions = dimensions;
}
@@ -65,8 +69,6 @@ public OpenAITextEmbeddingGenerationService(
int? dimensions = null)
{
this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService)));
- this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
-
this._dimensions = dimensions;
}
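The new `endpoint` parameter is what enables OpenAI-compatible self-hosted servers. Sketch (URL and model name illustrative; with a custom endpoint the key may be empty, per the ClientCore change above):

    var sut = new OpenAITextEmbeddingGenerationService(
        "nomic-embed-text",
        apiKey: "",
        endpoint: new Uri("http://localhost:11434/v1"));

    IList<ReadOnlyMemory<float>> vectors = await sut.GenerateEmbeddingsAsync(["hello world"]);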
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextToImageService.cs
new file mode 100644
index 000000000000..55eca0e112eb
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Services/OpenAITextToImageService.cs
@@ -0,0 +1,76 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.TextToImage;
+using OpenAI;
+
+/* Phase 02
+- Breaking the current constructor parameter order to follow the same order as the other services.
+- Added custom endpoint support; removed the ApiKey validation, as it is performed by ClientCore when no custom endpoint is provided.
+- Added custom OpenAIClient support.
+- Renamed the "organization" parameter to "organizationId".
+- The "modelId" parameter is now required in the constructor.
+
+- Added an OpenAIClient breaking-glass constructor.
+*/
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// OpenAI text to image service.
+/// </summary>
+[Experimental("SKEXP0010")]
+public class OpenAITextToImageService : ITextToImageService
+{
+ private readonly ClientCore _core;
+
+ /// <inheritdoc/>
+ public IReadOnlyDictionary<string, object?> Attributes => this._core.Attributes;
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="OpenAITextToImageService"/> class.
+ /// </summary>
+ /// <param name="modelId">The model to use for image generation.</param>
+ /// <param name="apiKey">OpenAI API key, see https://platform.openai.com/account/api-keys</param>
+ /// <param name="organizationId">OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.</param>
+ /// <param name="endpoint">Non-default endpoint for the OpenAI API.</param>
+ /// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
+ /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
+ public OpenAITextToImageService(
+ string modelId,
+ string? apiKey = null,
+ string? organizationId = null,
+ Uri? endpoint = null,
+ HttpClient? httpClient = null,
+ ILoggerFactory? loggerFactory = null)
+ {
+ this._core = new(modelId, apiKey, organizationId, endpoint, httpClient, loggerFactory?.CreateLogger(this.GetType()));
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="OpenAITextToImageService"/> class.
+ /// </summary>
+ /// <param name="modelId">Model name</param>
+ /// <param name="openAIClient">Custom <see cref="OpenAIClient"/> for HTTP requests.</param>
+ /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
+ public OpenAITextToImageService(
+ string modelId,
+ OpenAIClient openAIClient,
+ ILoggerFactory? loggerFactory = null)
+ {
+ this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextToImageService)));
+ }
+
+ /// <inheritdoc/>
+ public Task<string> GenerateImageAsync(string description, int width, int height, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ {
+ this._core.LogActionDetails();
+ return this._core.GenerateImageAsync(description, width, height, cancellationToken);
+ }
+}
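Sketch of the breaking-glass path this file adds: hand the service a preconfigured SDK client instead of key/endpoint parameters (model name and prompt illustrative):

    var sdkClient = new OpenAIClient(apiKey);
    var imageService = new OpenAITextToImageService("dall-e-3", sdkClient);

    string url = await imageService.GenerateImageAsync("A minimalist skyline at dusk", 1024, 1024);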
diff --git a/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs
index 6eca1909a546..bccc92bfa0f3 100644
--- a/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs
+++ b/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextEmbeddingTests.cs
@@ -19,7 +19,7 @@ public sealed class OpenAITextEmbeddingTests
.AddUserSecrets<OpenAITextEmbeddingTests>()
.Build();
- [Theory]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
[InlineData("test sentence")]
public async Task OpenAITestAsync(string testInputString)
{
@@ -38,7 +38,7 @@ public async Task OpenAITestAsync(string testInputString)
Assert.Equal(3, batchResult.Count);
}
- [Theory]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
[InlineData(null, 3072)]
[InlineData(1024, 1024)]
public async Task OpenAIWithDimensionsAsync(int? dimensions, int expectedVectorLength)
diff --git a/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextToImageTests.cs b/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextToImageTests.cs
new file mode 100644
index 000000000000..812d41677b28
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/Connectors/OpenAI/OpenAITextToImageTests.cs
@@ -0,0 +1,42 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.TextToImage;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.OpenAI;
+public sealed class OpenAITextToImageTests
+{
+ private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets<OpenAITextToImageTests>()
+ .Build();
+
+ [Theory(Skip = "This test is for manual verification.")]
+ [InlineData("dall-e-2", 512, 512)]
+ [InlineData("dall-e-3", 1024, 1024)]
+ public async Task OpenAITextToImageByModelTestAsync(string modelId, int width, int height)
+ {
+ // Arrange
+ OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get();
+ Assert.NotNull(openAIConfiguration);
+
+ var kernel = Kernel.CreateBuilder()
+ .AddOpenAITextToImage(modelId, apiKey: openAIConfiguration.ApiKey)
+ .Build();
+
+ var service = kernel.GetRequiredService<ITextToImageService>();
+
+ // Act
+ var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", width, height);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.NotEmpty(result);
+ }
+}
diff --git a/dotnet/src/InternalUtilities/test/MoqExtensions.cs b/dotnet/src/InternalUtilities/test/MoqExtensions.cs
new file mode 100644
index 000000000000..8fb435e288f9
--- /dev/null
+++ b/dotnet/src/InternalUtilities/test/MoqExtensions.cs
@@ -0,0 +1,22 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Microsoft.Extensions.Logging;
+using Moq;
+
+#pragma warning disable CS8620 // Argument cannot be used for parameter due to differences in the nullability of reference types.
+
+internal static class MoqExtensions
+{
+ public static void VerifyLog<T>(this Mock<ILogger<T>> logger, LogLevel logLevel, string message, Times times)
+ {
+ logger.Verify(
+ x => x.Log(
+ It.Is<LogLevel>(l => l == logLevel),
+ It.IsAny<EventId>(),
+ It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains(message)),
+ It.IsAny<Exception>(),
+ It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
+ times);
+ }
+}
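Usage sketch for the helper, mirroring the logging tests earlier in this patch:

    var logger = new Mock<ILogger<OpenAITextToImageService>>();
    logger.Setup(l => l.IsEnabled(It.IsAny<LogLevel>())).Returns(true);

    // ... exercise code that writes through logger.Object ...

    logger.VerifyLog(LogLevel.Information, "Action: GenerateImageAsync", Times.Once());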
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs
index c4c967445a6b..b30f78f3c0ca 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs
@@ -5,6 +5,10 @@
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Services;
+/* Phase 02
+- Changing "description" parameter to "prompt" to better match the OpenAI API and avoid confusion.
+*/
+
namespace Microsoft.SemanticKernel.TextToImage;
///
@@ -16,7 +20,7 @@ public interface ITextToImageService : IAIService
/// <summary>
/// Generate an image matching the given description
/// </summary>
- /// <param name="description">Image description</param>
+ /// <param name="description">Image generation prompt</param>
/// <param name="width">Image width in pixels</param>
/// <param name="height">Image height in pixels</param>
/// <param name="kernel">The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.</param>
From c8d9adeeaa819f5d5edd67898215ebc9917c5735 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Wed, 26 Jun 2024 22:59:02 +0100
Subject: [PATCH 009/226] .Net OpenAI V2 - Internal Utilities - Phase 03
(#6970)
- Updating policies using OpenAI SDK approach (GenericPolicy) impl.
- Updated Unit Tests
- Moved policy impl to openai Utilities.
---
dotnet/SK-dotnet.sln | 12 +++
.../Models/PipelineSynchronousPolicyTests.cs | 56 ------------
.../Connectors.OpenAIV2.csproj | 1 +
.../Connectors.OpenAIV2/Core/ClientCore.cs | 15 +++-
.../Core/Models/AddHeaderRequestPolicy.cs | 23 -----
.../Core/Models/PipelineSynchronousPolicy.cs | 89 -------------------
.../openai/OpenAIUtilities.props | 5 ++
.../Policies/GeneratedActionPipelinePolicy.cs | 45 ++++++++++
.../SemanticKernel.UnitTests.csproj | 2 +
.../GenericActionPipelinePolicyTests.cs} | 18 ++--
10 files changed, 85 insertions(+), 181 deletions(-)
delete mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs
delete mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs
delete mode 100644 dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs
create mode 100644 dotnet/src/InternalUtilities/openai/OpenAIUtilities.props
create mode 100644 dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs
rename dotnet/src/{Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs => SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs} (54%)
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 326a35a79ff7..6da6c33ec47a 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -327,6 +327,16 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureOpenAI", "s
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureOpenAI.UnitTests", "src\Connectors\Connectors.AzureOpenAI.UnitTests\Connectors.AzureOpenAI.UnitTests.csproj", "{DB219924-208B-4CDD-8796-EE424689901E}"
EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "openai", "openai", "{2E79AD99-632F-411F-B3A5-1BAF3F5F89AB}"
+ ProjectSection(SolutionItems) = preProject
+ src\InternalUtilities\openai\OpenAIUtilities.props = src\InternalUtilities\openai\OpenAIUtilities.props
+ EndProjectSection
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Policies", "Policies", "{7308EF7D-5F9A-47B2-A62F-0898603262A8}"
+ ProjectSection(SolutionItems) = preProject
+ src\InternalUtilities\openai\Policies\GeneratedActionPipelinePolicy.cs = src\InternalUtilities\openai\Policies\GeneratedActionPipelinePolicy.cs
+ EndProjectSection
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -932,6 +942,8 @@ Global
{FDEB4884-89B9-4656-80A0-57C7464490F7} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0}
{6744272E-8326-48CE-9A3F-6BE227A5E777} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
{DB219924-208B-4CDD-8796-EE424689901E} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+ {2E79AD99-632F-411F-B3A5-1BAF3F5F89AB} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675}
+ {7308EF7D-5F9A-47B2-A62F-0898603262A8} = {2E79AD99-632F-411F-B3A5-1BAF3F5F89AB}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs b/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs
deleted file mode 100644
index cae4b32b4283..000000000000
--- a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/PipelineSynchronousPolicyTests.cs
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ClientModel.Primitives;
-using System.Collections.Generic;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-using Xunit;
-
-namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core.Models;
-public class PipelineSynchronousPolicyTests
-{
- [Fact]
- public async Task ItProcessAsyncWhenSpecializationHasReceivedResponseOverrideShouldCallIt()
- {
- // Arrange
- var first = new MyHttpPipelinePolicyWithoutOverride();
- var last = new MyHttpPipelinePolicyWithOverride();
-
- IReadOnlyList<PipelinePolicy> policies = [first, last];
-
- // Act
- await policies[0].ProcessAsync(ClientPipeline.Create().CreateMessage(), policies, 0);
-
- // Assert
- Assert.True(first.CalledProcess);
- Assert.True(last.CalledProcess);
- Assert.True(last.CalledOnReceivedResponse);
- }
-
- private class MyHttpPipelinePolicyWithoutOverride : PipelineSynchronousPolicy
- {
- public bool CalledProcess { get; private set; }
-
- public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
- {
- this.CalledProcess = true;
- base.Process(message, pipeline, currentIndex);
- }
-
- public override ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
- {
- this.CalledProcess = true;
- return base.ProcessAsync(message, pipeline, currentIndex);
- }
- }
-
- private sealed class MyHttpPipelinePolicyWithOverride : MyHttpPipelinePolicyWithoutOverride
- {
- public bool CalledOnReceivedResponse { get; private set; }
-
- public override void OnReceivedResponse(PipelineMessage message)
- {
- this.CalledOnReceivedResponse = true;
- }
- }
-}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
index b17b14eb91ef..22f364461818 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Connectors.OpenAIV2.csproj
@@ -13,6 +13,7 @@
+ <Import Project="$(RepoRoot)/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props" />
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
index a6be6d20aa46..355000887f51 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/ClientCore.cs
@@ -116,7 +116,7 @@ internal ClientCore(
var options = GetOpenAIClientOptions(httpClient, this.Endpoint);
if (!string.IsNullOrWhiteSpace(organizationId))
{
- options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organizationId!), PipelinePosition.PerCall);
+ options.AddPolicy(CreateRequestHeaderPolicy("OpenAI-Organization", organizationId!), PipelinePosition.PerCall);
this.AddAttribute(ClientCore.OrganizationKey, organizationId);
}
@@ -184,7 +184,7 @@ private static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient
Endpoint = endpoint
};
- options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), PipelinePosition.PerCall);
+ options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), PipelinePosition.PerCall);
if (httpClient is not null)
{
@@ -213,4 +213,15 @@ private static async Task<T> RunRequestAsync<T>(Func<Task<T>> request)
throw e.ToHttpOperationException();
}
}
+
+ private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue)
+ {
+ return new GenericActionPipelinePolicy((message) =>
+ {
+ if (message?.Request?.Headers?.TryGetValue(headerName, out string? _) == false)
+ {
+ message.Request.Headers.Set(headerName, headerValue);
+ }
+ });
+ }
}
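Unlike the deleted `AddHeaderRequestPolicy`, the generated policy above only sets a header when it is absent. Attaching one to client options, following `CreateRequestHeaderPolicy` (header value illustrative):

    var options = new OpenAIClientOptions();
    options.AddPolicy(
        new GenericActionPipelinePolicy(message =>
        {
            if (message?.Request?.Headers?.TryGetValue("OpenAI-Organization", out string? _) == false)
            {
                message.Request.Headers.Set("OpenAI-Organization", "org-123");
            }
        }),
        PipelinePosition.PerCall);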
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs
deleted file mode 100644
index 2279d639c54e..000000000000
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/AddHeaderRequestPolicy.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-/* Phase 1
-Added from OpenAI v1 with adapted logic to the System.ClientModel abstraction
-*/
-
-using System.ClientModel.Primitives;
-
-namespace Microsoft.SemanticKernel.Connectors.OpenAI;
-
-///
-/// Helper class to inject headers into System ClientModel Http pipeline
-///
-internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : PipelineSynchronousPolicy
-{
- private readonly string _headerName = headerName;
- private readonly string _headerValue = headerValue;
-
- public override void OnSendingRequest(PipelineMessage message)
- {
- message.Request.Headers.Add(this._headerName, this._headerValue);
- }
-}
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs b/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs
deleted file mode 100644
index b7690ead8b7f..000000000000
--- a/dotnet/src/Connectors/Connectors.OpenAIV2/Core/Models/PipelineSynchronousPolicy.cs
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-/*
-Phase 1
-As SystemClient model does not have any specialization or extension ATM, introduced this class with the adapted to use System.ClientModel abstractions.
-https://github.com/Azure/azure-sdk-for-net/blob/8bd22837639d54acccc820e988747f8d28bbde4a/sdk/core/Azure.Core/src/Pipeline/HttpPipelineSynchronousPolicy.cs
-*/
-
-using System;
-using System.ClientModel.Primitives;
-using System.Collections.Generic;
-using System.Reflection;
-using System.Threading.Tasks;
-
-namespace Microsoft.SemanticKernel.Connectors.OpenAI;
-
-/// <summary>
-/// Represents a <see cref="PipelinePolicy"/> that doesn't do any asynchronous or synchronously blocking operations.
-/// </summary>
-internal class PipelineSynchronousPolicy : PipelinePolicy
-{
- private static readonly Type[] s_onReceivedResponseParameters = new[] { typeof(PipelineMessage) };
-
- private readonly bool _hasOnReceivedResponse = true;
-
- /// <summary>
- /// Initializes a new instance of <see cref="PipelineSynchronousPolicy"/>
- /// </summary>
- protected PipelineSynchronousPolicy()
- {
- var onReceivedResponseMethod = this.GetType().GetMethod(nameof(OnReceivedResponse), BindingFlags.Instance | BindingFlags.Public, null, s_onReceivedResponseParameters, null);
- if (onReceivedResponseMethod != null)
- {
- this._hasOnReceivedResponse = onReceivedResponseMethod.GetBaseDefinition().DeclaringType != onReceivedResponseMethod.DeclaringType;
- }
- }
-
- /// <inheritdoc/>
- public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
- {
- this.OnSendingRequest(message);
- if (pipeline.Count > currentIndex + 1)
- {
- // If there are more policies in the pipeline, continue processing
- ProcessNext(message, pipeline, currentIndex);
- }
- this.OnReceivedResponse(message);
- }
-
- /// <inheritdoc/>
- public override ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
- {
- if (!this._hasOnReceivedResponse)
- {
- // If OnReceivedResponse was not overridden we can avoid creating a state machine and return the task directly
- this.OnSendingRequest(message);
- if (pipeline.Count > currentIndex + 1)
- {
- // If there are more policies in the pipeline, continue processing
- return ProcessNextAsync(message, pipeline, currentIndex);
- }
- }
-
- return this.InnerProcessAsync(message, pipeline, currentIndex);
- }
-
- private async ValueTask InnerProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
- {
- this.OnSendingRequest(message);
- if (pipeline.Count > currentIndex + 1)
- {
- // If there are more policies in the pipeline, continue processing
- await ProcessNextAsync(message, pipeline, currentIndex).ConfigureAwait(false);
- }
- this.OnReceivedResponse(message);
- }
-
- /// <summary>
- /// Method is invoked before the request is sent.
- /// </summary>
- /// <param name="message">The <see cref="PipelineMessage"/> containing the request.</param>
- public virtual void OnSendingRequest(PipelineMessage message) { }
-
- /// <summary>
- /// Method is invoked after the response is received.
- /// </summary>
- /// <param name="message">The <see cref="PipelineMessage"/> containing the response.</param>
- public virtual void OnReceivedResponse(PipelineMessage message) { }
-}
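The deleted ProcessAsync carries a micro-optimization that is easy to miss: when OnReceivedResponse was not overridden, it returned the inner task directly instead of awaiting it, avoiding an async state machine allocation. A minimal, self-contained sketch of that pattern (names are illustrative, not the deleted class's API):

```csharp
using System;
using System.Threading.Tasks;

internal static class DirectReturnSketch
{
    public static ValueTask ProcessAsync(bool needsPostProcessing, Func<ValueTask> next)
    {
        if (!needsPostProcessing)
        {
            // Nothing to do after the response: return the inner task directly,
            // so no async state machine is allocated for this frame.
            return next();
        }

        return Awaited(next);

        static async ValueTask Awaited(Func<ValueTask> next)
        {
            await next().ConfigureAwait(false);
            // Post-processing (OnReceivedResponse in the deleted class) runs here.
        }
    }
}
```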
diff --git a/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props b/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props
new file mode 100644
index 000000000000..e865b7fe40e9
--- /dev/null
+++ b/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs
new file mode 100644
index 000000000000..931f12957965
--- /dev/null
+++ b/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs
@@ -0,0 +1,45 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+/* Phase 03
+Adapted from OpenAI SDK original policy with warning updates.
+
+Original file: https://github.com/openai/openai-dotnet/blob/0b97311f58dfb28bd883d990f68d548da040a807/src/Utility/GenericActionPipelinePolicy.cs#L8
+*/
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Threading.Tasks;
+
+/// <summary>
+/// Generic action pipeline policy for processing messages.
+/// </summary>
+[ExcludeFromCodeCoverage]
+internal sealed class GenericActionPipelinePolicy : PipelinePolicy
+{
+ private readonly Action<PipelineMessage> _processMessageAction;
+
+ internal GenericActionPipelinePolicy(Action<PipelineMessage> processMessageAction)
+ {
+ this._processMessageAction = processMessageAction;
+ }
+
+ public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ this._processMessageAction(message);
+ if (currentIndex < pipeline.Count - 1)
+ {
+ pipeline[currentIndex + 1].Process(message, pipeline, currentIndex + 1);
+ }
+ }
+
+ public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
+ {
+ this._processMessageAction(message);
+ if (currentIndex < pipeline.Count - 1)
+ {
+ await pipeline[currentIndex + 1].ProcessAsync(message, pipeline, currentIndex + 1).ConfigureAwait(false);
+ }
+ }
+}
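A usage sketch for the new policy, assuming System.ClientModel's ClientPipeline.Create overload that accepts per-call policies (the unit test further below exercises Process directly instead):

```csharp
using System.ClientModel.Primitives;

// Header name and value are illustrative.
var policy = new GenericActionPipelinePolicy(m => m.Request.Headers.Set("x-demo-header", "1"));

ClientPipeline pipeline = ClientPipeline.Create(
    new ClientPipelineOptions(),
    perCallPolicies: [policy],
    perTryPolicies: [],
    beforeTransportPolicies: []);

PipelineMessage message = pipeline.CreateMessage();
```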
diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj
index e929fe1ca82f..3cbaf6b60797 100644
--- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj
+++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj
@@ -28,6 +28,7 @@
+
@@ -38,6 +39,7 @@
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs
similarity index 54%
rename from dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs
rename to dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs
index 83ec6a20568d..ca36f300b1c2 100644
--- a/dotnet/src/Connectors/Connectors.OpenAIV2.UnitTests/Core/Models/AddHeaderRequestPolicyTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs
@@ -1,39 +1,35 @@
// Copyright (c) Microsoft. All rights reserved.
using System.ClientModel.Primitives;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
using Xunit;
-namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core.Models;
+namespace SemanticKernel.UnitTests.Utilities.OpenAI;
-public class AddHeaderRequestPolicyTests
+public class GenericActionPipelinePolicyTests
{
[Fact]
public void ItCanBeInstantiated()
{
- // Arrange
- var headerName = "headerName";
- var headerValue = "headerValue";
-
// Act
- var addHeaderRequestPolicy = new AddHeaderRequestPolicy(headerName, headerValue);
+ var addHeaderRequestPolicy = new GenericActionPipelinePolicy((message) => { });
// Assert
Assert.NotNull(addHeaderRequestPolicy);
}
[Fact]
- public void ItOnSendingRequestAddsHeaderToRequest()
+ public void ItProcessAddsHeaderToRequest()
{
// Arrange
var headerName = "headerName";
var headerValue = "headerValue";
- var addHeaderRequestPolicy = new AddHeaderRequestPolicy(headerName, headerValue);
+ var sut = new GenericActionPipelinePolicy((message) => { message.Request.Headers.Add(headerName, headerValue); });
+
var pipeline = ClientPipeline.Create();
var message = pipeline.CreateMessage();
// Act
- addHeaderRequestPolicy.OnSendingRequest(message);
+ sut.Process(message, [sut], 0);
// Assert
message.Request.Headers.TryGetValue(headerName, out var value);
From f8a22b8240940fb220d500be9cecb3e3429ecc6c Mon Sep 17 00:00:00 2001
From: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com>
Date: Thu, 27 Jun 2024 18:34:36 +0100
Subject: [PATCH 010/226] .Net: Migrate Azure Chat Completion Service to
AzureOpenAI SDK v2 (#6984)
### Motivation and Context
This PR is the next step in a series of follow-up PRs to migrate
AzureOpenAIConnector to Azure AI SDK v2. It updates all code related to
AzureOpenAI ChatCompletionService to use the Azure AI SDK v2. One of the
goals of the PR is to update the code with a minimal number of changes to
keep the code review as easy as possible; as a result, almost all methods
keep their original names even though those names may no longer be accurate.
This will be fixed in one of the follow-up PRs.
### Description
This PR does the following:
1. Migrates AzureOpenAIChatCompletionService, ClientCore, and the other
model classes they both use to the Azure AI SDK v2.
2. Updates the ToolCallBehavior classes to return the list of functions and
the function choice (see the sketch after this list). This change is
required because the new SDK model requires both of those when the
CompletionsOptions instance is created and, unlike before, does not allow
setting them afterwards.
3. Adapts related unit tests to the API changes.
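A self-contained sketch of the shape change described in item 2; the type and member names below are illustrative stand-ins, not the actual Semantic Kernel types:

```csharp
using System.Collections.Generic;

// Before: the behavior mutated an options object that already existed.
//   behavior.ConfigureOptions(kernel, chatCompletionsOptions);
// After: the behavior returns the tools and the choice, and the caller
// supplies both when the SDK options object is constructed.
internal sealed record ToolCallingConfig(IReadOnlyList<string>? Tools, string? Choice);

internal static class ToolCallBehaviorShapeSketch
{
    public static ToolCallingConfig ConfigureOptions(bool hasKernelFunctions) =>
        hasKernelFunctions
            ? new ToolCallingConfig(Tools: ["MyPlugin-MyFunction"], Choice: "auto")
            : new ToolCallingConfig(Tools: null, Choice: null);
}
```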
### Next steps
1. Add integration tests.
2. Rename internal/private methods that were intentionally left with
old, irrelevant names to minimize the code review delta.
### Out of scope:
* https://github.com/microsoft/semantic-kernel/issues/6991
### Contribution Checklist
- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone :smile:
---
...AzureOpenAIPromptExecutionSettingsTests.cs | 6 +-
.../AzureOpenAITestHelper.cs | 10 +
...cs => AzureOpenAIToolCallBehaviorTests.cs} | 82 +-
.../AzureOpenAIChatCompletionServiceTests.cs | 127 +--
.../AzureOpenAIChatMessageContentTests.cs | 31 +-
.../Core/AzureOpenAIFunctionToolCallTests.cs | 10 +-
...reOpenAIPluginCollectionExtensionsTests.cs | 8 +-
.../ClientResultExceptionExtensionsTests.cs | 53 ++
.../RequestFailedExceptionExtensionsTests.cs | 77 --
.../AutoFunctionInvocationFilterTests.cs | 37 +-
.../AzureOpenAIFunctionTests.cs | 42 +-
.../KernelFunctionMetadataExtensionsTests.cs | 4 +-
.../AddHeaderRequestPolicy.cs | 20 -
.../AzureOpenAIPromptExecutionSettings.cs | 46 +-
...vior.cs => AzureOpenAIToolCallBehavior.cs} | 86 +-
.../AzureOpenAIChatCompletionService.cs | 3 +-
....cs => ClientResultExceptionExtensions.cs} | 9 +-
.../Connectors.AzureOpenAI.csproj | 3 +-
.../Core/AzureOpenAIChatMessageContent.cs | 45 +-
.../Core/AzureOpenAIClientCore.cs | 11 +-
.../Core/AzureOpenAIFunction.cs | 20 +-
.../Core/AzureOpenAIFunctionToolCall.cs | 52 +-
.../AzureOpenAIPluginCollectionExtensions.cs | 4 +-
.../AzureOpenAIStreamingChatMessageContent.cs | 35 +-
.../Connectors.AzureOpenAI/Core/ClientCore.cs | 862 +++++++-----------
25 files changed, 714 insertions(+), 969 deletions(-)
rename dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/{AzureToolCallBehaviorTests.cs => AzureOpenAIToolCallBehaviorTests.cs} (69%)
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/ClientResultExceptionExtensionsTests.cs
delete mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs
delete mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs
rename dotnet/src/Connectors/Connectors.AzureOpenAI/{AzureToolCallBehavior.cs => AzureOpenAIToolCallBehavior.cs} (78%)
rename dotnet/src/Connectors/Connectors.AzureOpenAI/{RequestFailedExceptionExtensions.cs => ClientResultExceptionExtensions.cs} (78%)
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs
index 0cf1c4e2a9e3..7b50e36c5587 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIPromptExecutionSettingsTests.cs
@@ -26,12 +26,11 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
Assert.Equal(1, executionSettings.TopP);
Assert.Equal(0, executionSettings.FrequencyPenalty);
Assert.Equal(0, executionSettings.PresencePenalty);
- Assert.Equal(1, executionSettings.ResultsPerPrompt);
Assert.Null(executionSettings.StopSequences);
Assert.Null(executionSettings.TokenSelectionBiases);
Assert.Null(executionSettings.TopLogprobs);
Assert.Null(executionSettings.Logprobs);
- Assert.Null(executionSettings.AzureChatExtensionsOptions);
+ Assert.Null(executionSettings.AzureChatDataSource);
Assert.Equal(128, executionSettings.MaxTokens);
}
@@ -45,7 +44,6 @@ public void ItUsesExistingOpenAIExecutionSettings()
TopP = 0.7,
FrequencyPenalty = 0.7,
PresencePenalty = 0.7,
- ResultsPerPrompt = 2,
StopSequences = new string[] { "foo", "bar" },
ChatSystemPrompt = "chat system prompt",
MaxTokens = 128,
@@ -231,7 +229,6 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
// Assert
Assert.True(executionSettings.IsFrozen);
Assert.Throws<InvalidOperationException>(() => executionSettings.ModelId = "gpt-4");
- Assert.Throws<InvalidOperationException>(() => executionSettings.ResultsPerPrompt = 2);
Assert.Throws<InvalidOperationException>(() => executionSettings.Temperature = 1);
Assert.Throws<InvalidOperationException>(() => executionSettings.TopP = 1);
Assert.Throws<NotSupportedException>(() => executionSettings.StopSequences?.Add("STOP"));
@@ -262,7 +259,6 @@ private static void AssertExecutionSettings(AzureOpenAIPromptExecutionSettings e
Assert.Equal(0.7, executionSettings.TopP);
Assert.Equal(0.7, executionSettings.FrequencyPenalty);
Assert.Equal(0.7, executionSettings.PresencePenalty);
- Assert.Equal(2, executionSettings.ResultsPerPrompt);
Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
Assert.Equal(new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
index 9df4aae40c2d..31a7654fcfc6 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.IO;
+using System.Net.Http;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
@@ -17,4 +18,13 @@ internal static string GetTestResponse(string fileName)
{
return File.ReadAllText($"./TestData/{fileName}");
}
+
+ /// <summary>
+ /// Reads test response from file and creates <see cref="StreamContent"/>.
+ /// </summary>
+ /// <param name="fileName">Name of the file with test response.</param>
+ internal static StreamContent GetTestResponseAsStream(string fileName)
+ {
+ return new StreamContent(File.OpenRead($"./TestData/{fileName}"));
+ }
}
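The streaming tests below switch from StringContent to this helper because the v2 SDK reads server-sent events from the response stream rather than from a buffered string. A minimal usage sketch (file name as used in the tests):

```csharp
using System.Net;
using System.Net.Http;

// Streamed body instead of a buffered string, matching how the v2 SDK
// consumes server-sent events.
using var response = new HttpResponseMessage(HttpStatusCode.OK)
{
    Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt")
};
```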
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIToolCallBehaviorTests.cs
similarity index 69%
rename from dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs
rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIToolCallBehaviorTests.cs
index 525dabcd26d2..6baa78faae1e 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureToolCallBehaviorTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAIToolCallBehaviorTests.cs
@@ -2,23 +2,23 @@
using System.Collections.Generic;
using System.Linq;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
-using static Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureToolCallBehavior;
+using OpenAI.Chat;
+using static Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAIToolCallBehavior;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
/// <summary>
-/// Unit tests for <see cref="AzureToolCallBehavior"/>
+/// Unit tests for <see cref="AzureOpenAIToolCallBehavior"/>
/// </summary>
-public sealed class AzureToolCallBehaviorTests
+public sealed class AzureOpenAIToolCallBehaviorTests
{
[Fact]
public void EnableKernelFunctionsReturnsCorrectKernelFunctionsInstance()
{
// Arrange & Act
- var behavior = AzureToolCallBehavior.EnableKernelFunctions;
+ var behavior = AzureOpenAIToolCallBehavior.EnableKernelFunctions;
// Assert
Assert.IsType<KernelFunctions>(behavior);
@@ -30,7 +30,7 @@ public void AutoInvokeKernelFunctionsReturnsCorrectKernelFunctionsInstance()
{
// Arrange & Act
const int DefaultMaximumAutoInvokeAttempts = 128;
- var behavior = AzureToolCallBehavior.AutoInvokeKernelFunctions;
+ var behavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions;
// Assert
Assert.IsType<KernelFunctions>(behavior);
@@ -42,7 +42,7 @@ public void EnableFunctionsReturnsEnabledFunctionsInstance()
{
// Arrange & Act
List<AzureOpenAIFunction> functions = [new("Plugin", "Function", "description", [], null)];
- var behavior = AzureToolCallBehavior.EnableFunctions(functions);
+ var behavior = AzureOpenAIToolCallBehavior.EnableFunctions(functions);
// Assert
Assert.IsType<EnabledFunctions>(behavior);
@@ -52,7 +52,7 @@ public void EnableFunctionsReturnsEnabledFunctionsInstance()
public void RequireFunctionReturnsRequiredFunctionInstance()
{
// Arrange & Act
- var behavior = AzureToolCallBehavior.RequireFunction(new("Plugin", "Function", "description", [], null));
+ var behavior = AzureOpenAIToolCallBehavior.RequireFunction(new("Plugin", "Function", "description", [], null));
// Assert
Assert.IsType<RequiredFunction>(behavior);
@@ -63,13 +63,13 @@ public void KernelFunctionsConfigureOptionsWithNullKernelDoesNotAddTools()
{
// Arrange
var kernelFunctions = new KernelFunctions(autoInvoke: false);
- var chatCompletionsOptions = new ChatCompletionsOptions();
// Act
- kernelFunctions.ConfigureOptions(null, chatCompletionsOptions);
+ var options = kernelFunctions.ConfigureOptions(null);
// Assert
- Assert.Empty(chatCompletionsOptions.Tools);
+ Assert.Null(options.Choice);
+ Assert.Null(options.Tools);
}
[Fact]
@@ -77,15 +77,14 @@ public void KernelFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools()
{
// Arrange
var kernelFunctions = new KernelFunctions(autoInvoke: false);
- var chatCompletionsOptions = new ChatCompletionsOptions();
var kernel = Kernel.CreateBuilder().Build();
// Act
- kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions);
+ var options = kernelFunctions.ConfigureOptions(kernel);
// Assert
- Assert.Null(chatCompletionsOptions.ToolChoice);
- Assert.Empty(chatCompletionsOptions.Tools);
+ Assert.Null(options.Choice);
+ Assert.Null(options.Tools);
}
[Fact]
@@ -93,7 +92,6 @@ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools()
{
// Arrange
var kernelFunctions = new KernelFunctions(autoInvoke: false);
- var chatCompletionsOptions = new ChatCompletionsOptions();
var kernel = Kernel.CreateBuilder().Build();
var plugin = this.GetTestPlugin();
@@ -101,12 +99,12 @@ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools()
kernel.Plugins.Add(plugin);
// Act
- kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions);
+ var options = kernelFunctions.ConfigureOptions(kernel);
// Assert
- Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice);
+ Assert.Equal(ChatToolChoice.Auto, options.Choice);
- this.AssertTools(chatCompletionsOptions);
+ this.AssertTools(options.Tools);
}
[Fact]
@@ -114,14 +112,13 @@ public void EnabledFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools()
{
// Arrange
var enabledFunctions = new EnabledFunctions([], autoInvoke: false);
- var chatCompletionsOptions = new ChatCompletionsOptions();
// Act
- enabledFunctions.ConfigureOptions(null, chatCompletionsOptions);
+ var options = enabledFunctions.ConfigureOptions(null);
// Assert
- Assert.Null(chatCompletionsOptions.ToolChoice);
- Assert.Empty(chatCompletionsOptions.Tools);
+ Assert.Null(options.Choice);
+ Assert.Null(options.Tools);
}
[Fact]
@@ -130,10 +127,9 @@ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsExc
// Arrange
var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction());
var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true);
- var chatCompletionsOptions = new ChatCompletionsOptions();
// Act & Assert
- var exception = Assert.Throws<KernelException>(() => enabledFunctions.ConfigureOptions(null, chatCompletionsOptions));
+ var exception = Assert.Throws<KernelException>(() => enabledFunctions.ConfigureOptions(null));
Assert.Equal($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.", exception.Message);
}
@@ -143,11 +139,10 @@ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsEx
// Arrange
var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction());
var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true);
- var chatCompletionsOptions = new ChatCompletionsOptions();
var kernel = Kernel.CreateBuilder().Build();
// Act & Assert
- var exception = Assert.Throws<KernelException>(() => enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions));
+ var exception = Assert.Throws<KernelException>(() => enabledFunctions.ConfigureOptions(kernel));
Assert.Equal($"The specified {nameof(EnabledFunctions)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message);
}
@@ -160,18 +155,17 @@ public void EnabledFunctionsConfigureOptionsWithKernelAndPluginsAddsTools(bool a
var plugin = this.GetTestPlugin();
var functions = plugin.GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction());
var enabledFunctions = new EnabledFunctions(functions, autoInvoke);
- var chatCompletionsOptions = new ChatCompletionsOptions();
var kernel = Kernel.CreateBuilder().Build();
kernel.Plugins.Add(plugin);
// Act
- enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions);
+ var options = enabledFunctions.ConfigureOptions(kernel);
// Assert
- Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice);
+ Assert.Equal(ChatToolChoice.Auto, options.Choice);
- this.AssertTools(chatCompletionsOptions);
+ this.AssertTools(options.Tools);
}
[Fact]
@@ -180,10 +174,9 @@ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsEx
// Arrange
var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction()).First();
var requiredFunction = new RequiredFunction(function, autoInvoke: true);
- var chatCompletionsOptions = new ChatCompletionsOptions();
// Act & Assert
- var exception = Assert.Throws<KernelException>(() => requiredFunction.ConfigureOptions(null, chatCompletionsOptions));
+ var exception = Assert.Throws<KernelException>(() => requiredFunction.ConfigureOptions(null));
Assert.Equal($"Auto-invocation with {nameof(RequiredFunction)} is not supported when no kernel is provided.", exception.Message);
}
@@ -193,11 +186,10 @@ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsE
// Arrange
var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToAzureOpenAIFunction()).First();
var requiredFunction = new RequiredFunction(function, autoInvoke: true);
- var chatCompletionsOptions = new ChatCompletionsOptions();
var kernel = Kernel.CreateBuilder().Build();
// Act & Assert
- var exception = Assert.Throws<KernelException>(() => requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions));
+ var exception = Assert.Throws<KernelException>(() => requiredFunction.ConfigureOptions(kernel));
Assert.Equal($"The specified {nameof(RequiredFunction)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message);
}
@@ -207,18 +199,17 @@ public void RequiredFunctionConfigureOptionsAddsTools()
// Arrange
var plugin = this.GetTestPlugin();
var function = plugin.GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
- var chatCompletionsOptions = new ChatCompletionsOptions();
var requiredFunction = new RequiredFunction(function, autoInvoke: true);
var kernel = new Kernel();
kernel.Plugins.Add(plugin);
// Act
- requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions);
+ var options = requiredFunction.ConfigureOptions(kernel);
// Assert
- Assert.NotNull(chatCompletionsOptions.ToolChoice);
+ Assert.NotNull(options.Choice);
- this.AssertTools(chatCompletionsOptions);
+ this.AssertTools(options.Tools);
}
private KernelPlugin GetTestPlugin()
@@ -233,16 +224,15 @@ private KernelPlugin GetTestPlugin()
return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
}
- private void AssertTools(ChatCompletionsOptions chatCompletionsOptions)
+ private void AssertTools(IList<ChatTool>? tools)
{
- Assert.Single(chatCompletionsOptions.Tools);
-
- var tool = chatCompletionsOptions.Tools[0] as ChatCompletionsFunctionToolDefinition;
+ Assert.NotNull(tools);
+ var tool = Assert.Single(tools);
Assert.NotNull(tool);
- Assert.Equal("MyPlugin-MyFunction", tool.Name);
- Assert.Equal("Test Function", tool.Description);
- Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.Parameters.ToString());
+ Assert.Equal("MyPlugin-MyFunction", tool.FunctionName);
+ Assert.Equal("Test Function", tool.FunctionDescription);
+ Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.FunctionParameters.ToString());
}
}
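The reworked AssertTools reads tool metadata from the OpenAI v2 SDK's ChatTool. A short sketch of building an equivalent tool, assuming the SDK's ChatTool.CreateFunctionTool factory:

```csharp
using System;
using OpenAI.Chat;

ChatTool tool = ChatTool.CreateFunctionTool(
    functionName: "MyPlugin-MyFunction",
    functionDescription: "Test Function",
    functionParameters: BinaryData.FromString(
        """{"type":"object","required":[],"properties":{"parameter1":{"type":"string"},"parameter2":{"type":"string"}}}"""));

Console.WriteLine(tool.FunctionName);        // MyPlugin-MyFunction
Console.WriteLine(tool.FunctionDescription); // Test Function
```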
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
index 69c314bdcb46..3b3c90687b45 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
@@ -10,6 +10,7 @@
using System.Text.Json;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
+using Azure.AI.OpenAI.Chat;
using Azure.Core;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
@@ -17,6 +18,7 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Moq;
+using OpenAI.Chat;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.ChatCompletion;
@@ -79,7 +81,7 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto
public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory)
{
// Arrange & Act
- var client = new OpenAIClient("key");
+ var client = new AzureOpenAIClient(new Uri("http://host"), "key");
var service = includeLoggerFactory ?
new AzureOpenAIChatCompletionService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) :
new AzureOpenAIChatCompletionService("deployment", client, "model-id");
@@ -106,45 +108,14 @@ public async Task GetTextContentsWorksCorrectlyAsync()
Assert.True(result.Count > 0);
Assert.Equal("Test chat response", result[0].Text);
- var usage = result[0].Metadata?["Usage"] as CompletionsUsage;
+ var usage = result[0].Metadata?["Usage"] as ChatTokenUsage;
Assert.NotNull(usage);
- Assert.Equal(55, usage.PromptTokens);
- Assert.Equal(100, usage.CompletionTokens);
+ Assert.Equal(55, usage.InputTokens);
+ Assert.Equal(100, usage.OutputTokens);
Assert.Equal(155, usage.TotalTokens);
}
- [Fact]
- public async Task GetChatMessageContentsWithEmptyChoicesThrowsExceptionAsync()
- {
- // Arrange
- var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
- {
- Content = new StringContent("{\"id\":\"response-id\",\"object\":\"chat.completion\",\"created\":1704208954,\"model\":\"gpt-4\",\"choices\":[],\"usage\":{\"prompt_tokens\":55,\"completion_tokens\":100,\"total_tokens\":155},\"system_fingerprint\":null}")
- });
-
- // Act & Assert
- var exception = await Assert.ThrowsAsync<KernelException>(() => service.GetChatMessageContentsAsync([]));
-
- Assert.Equal("Chat completions not found", exception.Message);
- }
-
- [Theory]
- [InlineData(0)]
- [InlineData(129)]
- public async Task GetChatMessageContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt)
- {
- // Arrange
- var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new AzureOpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt };
-
- // Act & Assert
- var exception = await Assert.ThrowsAsync<ArgumentOutOfRangeException>(() => service.GetChatMessageContentsAsync([], settings));
-
- Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase);
- }
-
[Fact]
public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
{
@@ -157,22 +128,16 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
TopP = 0.5,
FrequencyPenalty = 1.6,
PresencePenalty = 1.2,
- ResultsPerPrompt = 5,
Seed = 567,
TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
StopSequences = ["stop_sequence"],
Logprobs = true,
TopLogprobs = 5,
- AzureChatExtensionsOptions = new AzureChatExtensionsOptions
+ AzureChatDataSource = new AzureSearchChatDataSource()
{
- Extensions =
- {
- new AzureSearchChatExtensionConfiguration
- {
- SearchEndpoint = new Uri("http://test-search-endpoint"),
- IndexName = "test-index-name"
- }
- }
+ Endpoint = new Uri("http://test-search-endpoint"),
+ IndexName = "test-index-name",
+ Authentication = DataSourceAuthentication.FromApiKey("api-key"),
}
};
@@ -226,7 +191,6 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
Assert.Equal(0.5, content.GetProperty("top_p").GetDouble());
Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble());
Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble());
- Assert.Equal(5, content.GetProperty("n").GetInt32());
Assert.Equal(567, content.GetProperty("seed").GetInt32());
Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32());
Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString());
@@ -259,7 +223,7 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje
});
// Act
- var result = await service.GetChatMessageContentsAsync([], settings);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings);
// Assert
var requestContent = this._messageHandlerStub.RequestContents[0];
@@ -273,7 +237,7 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje
[Theory]
[MemberData(nameof(ToolCallBehaviors))]
- public async Task GetChatMessageContentsWorksCorrectlyAsync(AzureToolCallBehavior behavior)
+ public async Task GetChatMessageContentsWorksCorrectlyAsync(AzureOpenAIToolCallBehavior behavior)
{
// Arrange
var kernel = Kernel.CreateBuilder().Build();
@@ -286,20 +250,20 @@ public async Task GetChatMessageContentsWorksCorrectlyAsync(AzureToolCallBehavio
});
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.True(result.Count > 0);
Assert.Equal("Test chat response", result[0].Content);
- var usage = result[0].Metadata?["Usage"] as CompletionsUsage;
+ var usage = result[0].Metadata?["Usage"] as ChatTokenUsage;
Assert.NotNull(usage);
- Assert.Equal(55, usage.PromptTokens);
- Assert.Equal(100, usage.CompletionTokens);
+ Assert.Equal(55, usage.InputTokens);
+ Assert.Equal(100, usage.OutputTokens);
Assert.Equal(155, usage.TotalTokens);
- Assert.Equal("stop", result[0].Metadata?["FinishReason"]);
+ Assert.Equal("Stop", result[0].Metadata?["FinishReason"]);
}
[Fact]
@@ -324,7 +288,7 @@ public async Task GetChatMessageContentsWithFunctionCallAsync()
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) };
using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
@@ -332,7 +296,7 @@ public async Task GetChatMessageContentsWithFunctionCallAsync()
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.True(result.Count > 0);
@@ -360,7 +324,7 @@ public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttempt
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
var responses = new List<HttpResponseMessage>();
@@ -372,7 +336,7 @@ public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttempt
this._messageHandlerStub.ResponsesToReturn = responses;
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount);
@@ -397,7 +361,7 @@ public async Task GetChatMessageContentsWithRequiredFunctionCallAsync()
kernel.Plugins.Add(plugin);
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) };
using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
@@ -405,7 +369,7 @@ public async Task GetChatMessageContentsWithRequiredFunctionCallAsync()
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.Equal(1, functionCallCount);
@@ -447,7 +411,7 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync()
Assert.Equal("Test chat streaming response", enumerator.Current.Text);
await enumerator.MoveNextAsync();
- Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]);
}
[Fact]
@@ -469,7 +433,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync()
Assert.Equal("Test chat streaming response", enumerator.Current.Content);
await enumerator.MoveNextAsync();
- Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]);
}
[Fact]
@@ -494,10 +458,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync()
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
- using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_multiple_function_calls_test_response.txt")) };
- using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) };
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_multiple_function_calls_test_response.txt") };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") };
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
@@ -506,10 +470,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync()
await enumerator.MoveNextAsync();
Assert.Equal("Test chat streaming response", enumerator.Current.Content);
- Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]);
await enumerator.MoveNextAsync();
- Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]);
// Keep looping until the end of stream
while (await enumerator.MoveNextAsync())
@@ -538,13 +502,13 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvo
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
var responses = new List<HttpResponseMessage>();
for (var i = 0; i < ModelResponsesCount; i++)
{
- responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) });
+ responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") });
}
this._messageHandlerStub.ResponsesToReturn = responses;
@@ -577,10 +541,10 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
kernel.Plugins.Add(plugin);
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
- using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) };
- using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) };
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") };
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
@@ -590,7 +554,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
// Function Tool Call Streaming (One Chunk)
await enumerator.MoveNextAsync();
Assert.Equal("Test chat streaming response", enumerator.Current.Content);
- Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]);
// Chat Completion Streaming (1st Chunk)
await enumerator.MoveNextAsync();
@@ -598,7 +562,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
// Chat Completion Streaming (2nd Chunk)
await enumerator.MoveNextAsync();
- Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]);
Assert.Equal(1, functionCallCount);
@@ -736,7 +700,7 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT
var chatHistory = new ChatHistory();
chatHistory.AddUserMessage("Fake prompt");
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.EnableKernelFunctions };
// Act
var result = await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -806,7 +770,7 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync()
new ChatMessageContent(AuthorRole.Assistant, items)
];
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.EnableKernelFunctions };
// Act
await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -865,7 +829,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn
])
};
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.EnableKernelFunctions };
// Act
await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -910,7 +874,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage
])
};
- var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.EnableKernelFunctions };
// Act
await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -941,18 +905,15 @@ public void Dispose()
this._messageHandlerStub.Dispose();
}
- public static TheoryData<AzureToolCallBehavior> ToolCallBehaviors => new()
+ public static TheoryData<AzureOpenAIToolCallBehavior> ToolCallBehaviors => new()
{
- AzureToolCallBehavior.EnableKernelFunctions,
- AzureToolCallBehavior.AutoInvokeKernelFunctions
+ AzureOpenAIToolCallBehavior.EnableKernelFunctions,
+ AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
};
public static TheoryData<object, string?> ResponseFormats => new()
{
- { new FakeChatCompletionsResponseFormat(), null },
{ "json_object", "json_object" },
{ "text", "text" }
};
-
- private sealed class FakeChatCompletionsResponseFormat : ChatCompletionsResponseFormat;
}
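Several assertions above read token usage from the result metadata as the SDK's ChatTokenUsage. A consumption sketch, assuming the connector keeps storing it under the "Usage" key:

```csharp
using System;
using Microsoft.SemanticKernel;
using OpenAI.Chat;

internal static class UsageMetadataSketch
{
    public static void PrintUsage(ChatMessageContent message)
    {
        // InputTokens/OutputTokens replace the v1 PromptTokens/CompletionTokens.
        if (message.Metadata is not null &&
            message.Metadata.TryGetValue("Usage", out object? value) &&
            value is ChatTokenUsage usage)
        {
            Console.WriteLine($"{usage.InputTokens} in, {usage.OutputTokens} out, {usage.TotalTokens} total");
        }
    }
}
```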
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs
index 304e62bc9aeb..76e0b2064439 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIChatMessageContentTests.cs
@@ -3,9 +3,9 @@
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using OpenAI.Chat;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
@@ -18,10 +18,10 @@ public sealed class AzureOpenAIChatMessageContentTests
public void ConstructorsWorkCorrectly()
{
// Arrange
- List<ChatCompletionsToolCall> toolCalls = [new FakeChatCompletionsToolCall("id")];
+ List<ChatToolCall> toolCalls = [ChatToolCall.CreateFunctionToolCall("id", "name", "args")];
// Act
- var content1 = new AzureOpenAIChatMessageContent(new ChatRole("user"), "content1", "model-id1", toolCalls) { AuthorName = "Fred" };
+ var content1 = new AzureOpenAIChatMessageContent(ChatMessageRole.User, "content1", "model-id1", toolCalls) { AuthorName = "Fred" };
var content2 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls);
// Assert
@@ -33,11 +33,9 @@ public void ConstructorsWorkCorrectly()
public void GetOpenAIFunctionToolCallsReturnsCorrectList()
{
// Arrange
- List<ChatCompletionsToolCall> toolCalls = [
- new ChatCompletionsFunctionToolCall("id1", "name", string.Empty),
- new ChatCompletionsFunctionToolCall("id2", "name", string.Empty),
- new FakeChatCompletionsToolCall("id3"),
- new FakeChatCompletionsToolCall("id4")];
+ List<ChatToolCall> toolCalls = [
+ ChatToolCall.CreateFunctionToolCall("id1", "name", string.Empty),
+ ChatToolCall.CreateFunctionToolCall("id2", "name", string.Empty)];
var content1 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content", "model-id", toolCalls);
var content2 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content", "model-id", []);
@@ -64,11 +62,9 @@ public void MetadataIsInitializedCorrectly(bool readOnlyMetadata)
new CustomReadOnlyDictionary<string, object?>(new Dictionary<string, object?> { { "key", "value" } }) :
new Dictionary<string, object?> { { "key", "value" } };
- List<ChatCompletionsToolCall> toolCalls = [
- new ChatCompletionsFunctionToolCall("id1", "name", string.Empty),
- new ChatCompletionsFunctionToolCall("id2", "name", string.Empty),
- new FakeChatCompletionsToolCall("id3"),
- new FakeChatCompletionsToolCall("id4")];
+ List<ChatToolCall> toolCalls = [
+ ChatToolCall.CreateFunctionToolCall("id1", "name", string.Empty),
+ ChatToolCall.CreateFunctionToolCall("id2", "name", string.Empty)];
// Act
var content1 = new AzureOpenAIChatMessageContent(AuthorRole.User, "content1", "model-id1", [], metadata);
@@ -82,9 +78,9 @@ public void MetadataIsInitializedCorrectly(bool readOnlyMetadata)
Assert.Equal(2, content2.Metadata.Count);
Assert.Equal("value", content2.Metadata["key"]);
- Assert.IsType<List<ChatCompletionsToolCall>>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]);
+ Assert.IsType<List<ChatToolCall>>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]);
- var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List<ChatCompletionsToolCall>;
+ var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List<ChatToolCall>;
Assert.NotNull(actualToolCalls);
Assert.Equal(2, actualToolCalls.Count);
@@ -96,7 +92,7 @@ private void AssertChatMessageContent(
AuthorRole expectedRole,
string expectedContent,
string expectedModelId,
- IReadOnlyList<ChatCompletionsToolCall> expectedToolCalls,
+ IReadOnlyList<ChatToolCall> expectedToolCalls,
AzureOpenAIChatMessageContent actualContent,
string? expectedName = null)
{
@@ -107,9 +103,6 @@ private void AssertChatMessageContent(
Assert.Same(expectedToolCalls, actualContent.ToolCalls);
}
- private sealed class FakeChatCompletionsToolCall(string id) : ChatCompletionsToolCall(id)
- { }
-
private sealed class CustomReadOnlyDictionary<TKey, TValue>(IDictionary<TKey, TValue> dictionary) : IReadOnlyDictionary<TKey, TValue> // explicitly not implementing IDictionary<>
{
public TValue this[TKey key] => dictionary[key];
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs
index 8f16c6ea7db2..766376ee00b9 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIFunctionToolCallTests.cs
@@ -2,8 +2,8 @@
using System.Collections.Generic;
using System.Text;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using OpenAI.Chat;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
@@ -18,7 +18,7 @@ public sealed class AzureOpenAIFunctionToolCallTests
public void FullyQualifiedNameReturnsValidName(string toolCallName, string expectedName)
{
// Arrange
- var toolCall = new ChatCompletionsFunctionToolCall("id", toolCallName, string.Empty);
+ var toolCall = ChatToolCall.CreateFunctionToolCall("id", toolCallName, string.Empty);
var openAIFunctionToolCall = new AzureOpenAIFunctionToolCall(toolCall);
// Act & Assert
@@ -30,7 +30,7 @@ public void FullyQualifiedNameReturnsValidName(string toolCallName, string expec
public void ToStringReturnsCorrectValue()
{
// Arrange
- var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}");
+ var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}");
var openAIFunctionToolCall = new AzureOpenAIFunctionToolCall(toolCall);
// Act & Assert
@@ -75,7 +75,7 @@ public void ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls()
var toolCall = toolCalls[0];
Assert.Equal("test-id", toolCall.Id);
- Assert.Equal("test-function", toolCall.Name);
- Assert.Equal("test-argument", toolCall.Arguments);
+ Assert.Equal("test-function", toolCall.FunctionName);
+ Assert.Equal("test-argument", toolCall.FunctionArguments);
}
}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs
index bbfb636196d3..e0642abc52e1 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureOpenAIPluginCollectionExtensionsTests.cs
@@ -1,8 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using OpenAI.Chat;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
@@ -18,7 +18,7 @@ public void TryGetFunctionAndArgumentsWithNonExistingFunctionReturnsFalse()
var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin");
var plugins = new KernelPluginCollection([plugin]);
- var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty);
+ var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty);
// Act
var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments);
@@ -37,7 +37,7 @@ public void TryGetFunctionAndArgumentsWithoutArgumentsReturnsTrue()
var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
var plugins = new KernelPluginCollection([plugin]);
- var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty);
+ var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty);
// Act
var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments);
@@ -56,7 +56,7 @@ public void TryGetFunctionAndArgumentsWithArgumentsReturnsTrue()
var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
var plugins = new KernelPluginCollection([plugin]);
- var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}");
+ var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}");
// Act
var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/ClientResultExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/ClientResultExceptionExtensionsTests.cs
new file mode 100644
index 000000000000..d810b2d2a470
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/ClientResultExceptionExtensionsTests.cs
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.IO;
+using System.Net;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+/// <summary>
+/// Unit tests for <see cref="ClientResultExceptionExtensions"/> class.
+/// </summary>
+public sealed class ClientResultExceptionExtensionsTests
+{
+ [Fact]
+ public void ToHttpOperationExceptionWithContentReturnsValidException()
+ {
+ // Arrange
+ using var response = new FakeResponse("Response Content", 500);
+ var exception = new ClientResultException(response);
+
+ // Act
+ var actualException = exception.ToHttpOperationException();
+
+ // Assert
+ Assert.IsType<HttpOperationException>(actualException);
+ Assert.Equal(HttpStatusCode.InternalServerError, actualException.StatusCode);
+ Assert.Equal("Response Content", actualException.ResponseContent);
+ Assert.Same(exception, actualException.InnerException);
+ }
+
+ #region private
+
+ private sealed class FakeResponse(string responseContent, int status) : PipelineResponse
+ {
+ private readonly string _responseContent = responseContent;
+ public override BinaryData Content => BinaryData.FromString(this._responseContent);
+ public override int Status { get; } = status;
+ public override string ReasonPhrase => "Reason Phrase";
+ public override Stream? ContentStream { get => null; set => throw new NotImplementedException(); }
+ protected override PipelineResponseHeaders HeadersCore => throw new NotImplementedException();
+ public override BinaryData BufferContent(CancellationToken cancellationToken = default) => new(this._responseContent);
+ public override ValueTask<BinaryData> BufferContentAsync(CancellationToken cancellationToken = default) => throw new NotImplementedException();
+ public override void Dispose() { }
+ }
+
+ #endregion
+}
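The extension under test is not shown in this patch; the following is a plausible reconstruction, assuming System.ClientModel's ClientResultException exposes Status and GetRawResponse() and that SK's HttpOperationException has a constructor taking these arguments:

```csharp
using System;
using System.ClientModel;
using System.Net;
using Microsoft.SemanticKernel;

internal static class ClientResultExceptionExtensionsSketch
{
    public static HttpOperationException ToHttpOperationException(this ClientResultException exception)
    {
        // ClientResultException.Status is 0 when no response was received.
        const int NoResponseStatus = 0;

        return new HttpOperationException(
            exception.Status == NoResponseStatus ? null : (HttpStatusCode)exception.Status,
            exception.GetRawResponse()?.Content.ToString(),
            exception.Message,
            exception);
    }
}
```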
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs
deleted file mode 100644
index 9fb65039116d..000000000000
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/RequestFailedExceptionExtensionsTests.cs
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Net;
-using Azure;
-using Azure.Core;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
-using Xunit;
-
-namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
-
-/// <summary>
-/// Unit tests for <see cref="RequestFailedExceptionExtensions"/> class.
-/// </summary>
-public sealed class RequestFailedExceptionExtensionsTests
-{
- [Theory]
- [InlineData(0, null)]
- [InlineData(500, HttpStatusCode.InternalServerError)]
- public void ToHttpOperationExceptionWithStatusReturnsValidException(int responseStatus, HttpStatusCode? httpStatusCode)
- {
- // Arrange
- var exception = new RequestFailedException(responseStatus, "Error Message");
-
- // Act
- var actualException = exception.ToHttpOperationException();
-
- // Assert
- Assert.IsType<HttpOperationException>(actualException);
- Assert.Equal(httpStatusCode, actualException.StatusCode);
- Assert.Equal("Error Message", actualException.Message);
- Assert.Same(exception, actualException.InnerException);
- }
-
- [Fact]
- public void ToHttpOperationExceptionWithContentReturnsValidException()
- {
- // Arrange
- using var response = new FakeResponse("Response Content", 500);
- var exception = new RequestFailedException(response);
-
- // Act
- var actualException = exception.ToHttpOperationException();
-
- // Assert
- Assert.IsType<HttpOperationException>(actualException);
- Assert.Equal(HttpStatusCode.InternalServerError, actualException.StatusCode);
- Assert.Equal("Response Content", actualException.ResponseContent);
- Assert.Same(exception, actualException.InnerException);
- }
-
- #region private
-
- private sealed class FakeResponse(string responseContent, int status) : Response
- {
- private readonly string _responseContent = responseContent;
- private readonly IEnumerable<HttpHeader> _headers = [];
-
- public override BinaryData Content => BinaryData.FromString(this._responseContent);
- public override int Status { get; } = status;
- public override string ReasonPhrase => "Reason Phrase";
- public override Stream? ContentStream { get => null; set => throw new NotImplementedException(); }
- public override string ClientRequestId { get => "Client Request Id"; set => throw new NotImplementedException(); }
-
- public override void Dispose() { }
- protected override bool ContainsHeader(string name) => throw new NotImplementedException();
- protected override IEnumerable<HttpHeader> EnumerateHeaders() => this._headers;
-#pragma warning disable CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes).
- protected override bool TryGetHeader(string name, out string? value) => throw new NotImplementedException();
- protected override bool TryGetHeaderValues(string name, out IEnumerable<string>? values) => throw new NotImplementedException();
-#pragma warning restore CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes).
- }
-
- #endregion
-}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs
index 270b055d730c..195f71e2758f 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AutoFunctionInvocationFilterTests.cs
@@ -64,7 +64,7 @@ public async Task FiltersAreExecutedCorrectlyAsync()
// Act
var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
}));
// Assert
@@ -107,7 +107,7 @@ public async Task FiltersAreExecutedCorrectlyOnStreamingAsync()
this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
- var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
// Act
await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
@@ -167,7 +167,7 @@ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync()
var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
}));
// Assert
@@ -227,7 +227,7 @@ public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming)
var arguments = new KernelArguments(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
});
// Act
@@ -277,7 +277,7 @@ public async Task FilterCanOverrideArgumentsAsync()
// Act
var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
}));
// Assert
@@ -309,9 +309,10 @@ public async Task FilterCanHandleExceptionAsync()
var chatCompletion = new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
- var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
var chatHistory = new ChatHistory();
+ chatHistory.AddSystemMessage("System message");
// Act
var result = await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
@@ -349,7 +350,7 @@ public async Task FilterCanHandleExceptionOnStreamingAsync()
var chatCompletion = new AzureOpenAIChatCompletionService("test-deployment", "https://endpoint", "test-api-key", "test-model-id", this._httpClient);
var chatHistory = new ChatHistory();
- var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
// Act
await foreach (var item in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel))
@@ -395,7 +396,7 @@ public async Task FiltersCanSkipFunctionExecutionAsync()
// Act
var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
}));
// Assert
@@ -429,7 +430,7 @@ public async Task PreFilterCanTerminateOperationAsync()
// Act
await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
}));
// Assert
@@ -459,7 +460,7 @@ public async Task PreFilterCanTerminateOperationOnStreamingAsync()
this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
- var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
// Act
await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
@@ -500,7 +501,7 @@ public async Task PostFilterCanTerminateOperationAsync()
// Act
var result = await kernel.InvokePromptAsync("Test prompt", new(new AzureOpenAIPromptExecutionSettings
{
- ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions
}));
// Assert
@@ -544,7 +545,7 @@ public async Task PostFilterCanTerminateOperationOnStreamingAsync()
this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
- var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureToolCallBehavior.AutoInvokeKernelFunctions };
+ var executionSettings = new AzureOpenAIPromptExecutionSettings { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
List<StreamingKernelContent> streamingContent = [];
@@ -582,18 +583,18 @@ public void Dispose()
private static List<HttpResponseMessage> GetFunctionCallingResponses()
{
return [
- new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) },
- new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) },
- new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("filters_multiple_function_calls_test_response.json") },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("filters_multiple_function_calls_test_response.json") },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_test_response.json") }
];
}
private static List<HttpResponseMessage> GetFunctionCallingStreamingResponses()
{
return [
- new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) },
- new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) },
- new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("filters_streaming_multiple_function_calls_test_response.txt") },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("filters_streaming_multiple_function_calls_test_response.txt") },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") }
];
}
#pragma warning restore CA2000
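The test churn above is a mechanical rename; call sites only swap the type name. A sketch of the new spelling (prompt text and settings values are illustrative):

    var settings = new AzureOpenAIPromptExecutionSettings
    {
        ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions,
    };
    var result = await kernel.InvokePromptAsync("Test prompt", new(settings));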
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs
index bd268ef67991..cf83f89bc783 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/AzureOpenAIFunctionTests.cs
@@ -4,9 +4,9 @@
using System.ComponentModel;
using System.Linq;
using System.Text.Json;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using OpenAI.Chat;
namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.FunctionCalling;
@@ -51,11 +51,11 @@ public void ItCanConvertToFunctionDefinitionWithNoPluginName()
AzureOpenAIFunction sut = KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.").Metadata.ToAzureOpenAIFunction();
// Act
- FunctionDefinition result = sut.ToFunctionDefinition();
+ ChatTool result = sut.ToFunctionDefinition();
// Assert
- Assert.Equal(sut.FunctionName, result.Name);
- Assert.Equal(sut.Description, result.Description);
+ Assert.Equal(sut.FunctionName, result.FunctionName);
+ Assert.Equal(sut.Description, result.FunctionDescription);
}
[Fact]
@@ -68,7 +68,7 @@ public void ItCanConvertToFunctionDefinitionWithNullParameters()
var result = sut.ToFunctionDefinition();
// Assert
- Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.Parameters.ToString());
+ Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.FunctionParameters.ToString());
}
[Fact]
@@ -81,11 +81,11 @@ public void ItCanConvertToFunctionDefinitionWithPluginName()
}).GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
// Act
- FunctionDefinition result = sut.ToFunctionDefinition();
+ ChatTool result = sut.ToFunctionDefinition();
// Assert
- Assert.Equal("myplugin-myfunc", result.Name);
- Assert.Equal(sut.Description, result.Description);
+ Assert.Equal("myplugin-myfunc", result.FunctionName);
+ Assert.Equal(sut.Description, result.FunctionDescription);
}
[Fact]
@@ -103,15 +103,15 @@ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndReturnParamete
AzureOpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
- FunctionDefinition functionDefinition = sut.ToFunctionDefinition();
+ ChatTool functionDefinition = sut.ToFunctionDefinition();
var exp = JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema));
- var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters));
+ var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters));
Assert.NotNull(functionDefinition);
- Assert.Equal("Tests-TestFunction", functionDefinition.Name);
- Assert.Equal("My test function", functionDefinition.Description);
- Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)));
+ Assert.Equal("Tests-TestFunction", functionDefinition.FunctionName);
+ Assert.Equal("My test function", functionDefinition.FunctionDescription);
+ Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters)));
}
[Fact]
@@ -129,12 +129,12 @@ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndNoReturnParame
AzureOpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToAzureOpenAIFunction();
- FunctionDefinition functionDefinition = sut.ToFunctionDefinition();
+ ChatTool functionDefinition = sut.ToFunctionDefinition();
Assert.NotNull(functionDefinition);
- Assert.Equal("Tests-TestFunction", functionDefinition.Name);
- Assert.Equal("My test function", functionDefinition.Description);
- Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)));
+ Assert.Equal("Tests-TestFunction", functionDefinition.FunctionName);
+ Assert.Equal("My test function", functionDefinition.FunctionDescription);
+ Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters)));
}
[Fact]
@@ -146,8 +146,8 @@ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypes()
parameters: [new KernelParameterMetadata("param1")]).Metadata.ToAzureOpenAIFunction();
// Act
- FunctionDefinition result = f.ToFunctionDefinition();
- ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.Parameters.ToString())!;
+ ChatTool result = f.ToFunctionDefinition();
+ ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.FunctionParameters.ToString())!;
// Assert
Assert.NotNull(pd.properties);
@@ -166,8 +166,8 @@ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypesButWithDescript
parameters: [new KernelParameterMetadata("param1") { Description = "something neat" }]).Metadata.ToAzureOpenAIFunction();
// Act
- FunctionDefinition result = f.ToFunctionDefinition();
- ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.Parameters.ToString())!;
+ ChatTool result = f.ToFunctionDefinition();
+ ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.FunctionParameters.ToString())!;
// Assert
Assert.NotNull(pd.properties);
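The assertion updates above reflect the OpenAI SDK's ChatTool shape, where function members carry a "Function" prefix. A rough mapping (old FunctionDefinition member names shown in comments; illustrative, not part of the patch):

    ChatTool tool = sut.ToFunctionDefinition();
    string name = tool.FunctionName;               // was FunctionDefinition.Name
    string description = tool.FunctionDescription; // was FunctionDefinition.Description
    BinaryData schema = tool.FunctionParameters;   // was FunctionDefinition.Parameters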
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs
index ebf7b67a2f9b..67cd371dfe23 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs
@@ -196,7 +196,7 @@ public void ItCanCreateValidAzureOpenAIFunctionManualForPlugin()
Assert.NotNull(result);
Assert.Equal(
"""{"type":"object","required":["parameter1","parameter2","parameter3"],"properties":{"parameter1":{"type":"string","description":"String parameter"},"parameter2":{"type":"string","enum":["Value1","Value2"],"description":"Enum parameter"},"parameter3":{"type":"string","format":"date-time","description":"DateTime parameter"}}}""",
- result.Parameters.ToString()
+ result.FunctionParameters.ToString()
);
}
@@ -231,7 +231,7 @@ public void ItCanCreateValidAzureOpenAIFunctionManualForPrompt()
Assert.NotNull(result);
Assert.Equal(
"""{"type":"object","required":["parameter1","parameter2"],"properties":{"parameter1":{"type":"string","description":"String parameter"},"parameter2":{"enum":["Value1","Value2"],"description":"Enum parameter"}}}""",
- result.Parameters.ToString()
+ result.FunctionParameters.ToString()
);
}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs
deleted file mode 100644
index 8303b2ceaeaf..000000000000
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/AddHeaderRequestPolicy.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Azure.Core;
-using Azure.Core.Pipeline;
-
-namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
-
-/// <summary>
-/// Helper class to inject headers into Azure SDK HTTP pipeline
-/// </summary>
-internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy
-{
- private readonly string _headerName = headerName;
- private readonly string _headerValue = headerValue;
-
- public override void OnSendingRequest(HttpMessage message)
- {
- message.Request.Headers.Add(this._headerName, this._headerValue);
- }
-}
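The removed policy relied on Azure.Core's HttpPipelineSynchronousPolicy; the v2 stack sits on System.ClientModel instead. A hedged sketch of an equivalent there (the class name is illustrative; PipelinePolicy, ProcessNext, and ProcessNextAsync are System.ClientModel.Primitives APIs, not code from this patch):

    using System.ClientModel.Primitives;
    using System.Collections.Generic;
    using System.Threading.Tasks;

    internal sealed class AddHeaderPolicy(string headerName, string headerValue) : PipelinePolicy
    {
        public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
        {
            // Stamp the header, then hand off to the rest of the pipeline.
            message.Request.Headers.Set(headerName, headerValue);
            ProcessNext(message, pipeline, currentIndex);
        }

        public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
        {
            message.Request.Headers.Set(headerName, headerValue);
            await ProcessNextAsync(message, pipeline, currentIndex).ConfigureAwait(false);
        }
    }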
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs
index 69c305f58f34..22141ee8aee0 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIPromptExecutionSettings.cs
@@ -6,9 +6,10 @@
using System.Diagnostics.CodeAnalysis;
using System.Text.Json;
using System.Text.Json.Serialization;
-using Azure.AI.OpenAI;
+using Azure.AI.OpenAI.Chat;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Text;
+using OpenAI.Chat;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -116,23 +117,6 @@ public IList? StopSequences
}
}
- /// <summary>
- /// How many completions to generate for each prompt. Default is 1.
- /// Note: Because this parameter generates many completions, it can quickly consume your token quota.
- /// Use carefully and ensure that you have reasonable settings for max_tokens and stop.
- /// </summary>
- [JsonPropertyName("results_per_prompt")]
- public int ResultsPerPrompt
- {
- get => this._resultsPerPrompt;
-
- set
- {
- this.ThrowIfFrozen();
- this._resultsPerPrompt = value;
- }
- }
-
///
/// If specified, the system will make a best effort to sample deterministically such that repeated requests with the
/// same seed and parameters should return the same result. Determinism is not guaranteed.
@@ -153,7 +137,7 @@ public long? Seed
/// Gets or sets the response format to use for the completion.
///
///
- /// Possible values are: "json_object", "text", <see cref="ChatCompletionsResponseFormat"/> object.
+ /// Possible values are: "json_object", "text", <see cref="ChatResponseFormat"/> object.
///
[Experimental("SKEXP0010")]
[JsonPropertyName("response_format")]
@@ -207,18 +191,18 @@ public IDictionary? TokenSelectionBiases
/// - To disable all tool calling, set the property to null (the default).
/// -
/// To request that the model use a specific function, set the property to an instance returned
- /// from <see cref="AzureToolCallBehavior.RequireFunction"/>.
+ /// from <see cref="AzureOpenAIToolCallBehavior.RequireFunction"/>.
///
/// -
/// To allow the model to request one of any number of functions, set the property to an
- /// instance returned from <see cref="AzureToolCallBehavior.EnableFunctions"/>, called with
+ /// instance returned from <see cref="AzureOpenAIToolCallBehavior.EnableFunctions"/>, called with
/// a list of the functions available.
///
/// -
/// To allow the model to request one of any of the functions in the supplied ,
- /// set the property to <see cref="AzureToolCallBehavior.EnableKernelFunctions"/> if the client should simply
+ /// set the property to <see cref="AzureOpenAIToolCallBehavior.EnableKernelFunctions"/> if the client should simply
/// send the information about the functions and not handle the response in any special manner, or
- /// <see cref="AzureToolCallBehavior.AutoInvokeKernelFunctions"/> if the client should attempt to automatically
+ /// <see cref="AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions"/> if the client should attempt to automatically
/// invoke the function and send the result back to the service.
///
///
@@ -229,7 +213,7 @@ public IDictionary? TokenSelectionBiases
/// the function, and sending back the result. The intermediate messages will be retained in the
/// <see cref="ChatHistory"/> if an instance was provided.
///
- public AzureToolCallBehavior? ToolCallBehavior
+ public AzureOpenAIToolCallBehavior? ToolCallBehavior
{
get => this._toolCallBehavior;
@@ -293,14 +277,14 @@ public int? TopLogprobs
///
[Experimental("SKEXP0010")]
[JsonIgnore]
- public AzureChatExtensionsOptions? AzureChatExtensionsOptions
+ public AzureChatDataSource? AzureChatDataSource
{
- get => this._azureChatExtensionsOptions;
+ get => this._azureChatDataSource;
set
{
this.ThrowIfFrozen();
- this._azureChatExtensionsOptions = value;
+ this._azureChatDataSource = value;
}
}
@@ -338,7 +322,6 @@ public override PromptExecutionSettings Clone()
FrequencyPenalty = this.FrequencyPenalty,
MaxTokens = this.MaxTokens,
StopSequences = this.StopSequences is not null ? new List<string>(this.StopSequences) : null,
- ResultsPerPrompt = this.ResultsPerPrompt,
Seed = this.Seed,
ResponseFormat = this.ResponseFormat,
TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary<int, int>(this.TokenSelectionBiases) : null,
@@ -347,7 +330,7 @@ public override PromptExecutionSettings Clone()
ChatSystemPrompt = this.ChatSystemPrompt,
Logprobs = this.Logprobs,
TopLogprobs = this.TopLogprobs,
- AzureChatExtensionsOptions = this.AzureChatExtensionsOptions,
+ AzureChatDataSource = this.AzureChatDataSource,
};
}
@@ -417,16 +400,15 @@ public static AzureOpenAIPromptExecutionSettings FromExecutionSettingsWithData(P
private double _frequencyPenalty;
private int? _maxTokens;
private IList<string>? _stopSequences;
- private int _resultsPerPrompt = 1;
private long? _seed;
private object? _responseFormat;
private IDictionary<int, int>? _tokenSelectionBiases;
- private AzureToolCallBehavior? _toolCallBehavior;
+ private AzureOpenAIToolCallBehavior? _toolCallBehavior;
private string? _user;
private string? _chatSystemPrompt;
private bool? _logprobs;
private int? _topLogprobs;
- private AzureChatExtensionsOptions? _azureChatExtensionsOptions;
+ private AzureChatDataSource? _azureChatDataSource;
#endregion
}
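Net effect for callers: ResultsPerPrompt is gone and the data-source hook changes type. A sketch of settings under the new surface (values illustrative; AzureChatDataSource remains experimental behind SKEXP0010):

    var settings = new AzureOpenAIPromptExecutionSettings
    {
        Temperature = 0.2,
        MaxTokens = 256,
        ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions,
        // AzureChatDataSource replaces AzureChatExtensionsOptions for
        // "on your data" scenarios.
    };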
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIToolCallBehavior.cs
similarity index 78%
rename from dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs
rename to dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIToolCallBehavior.cs
index 4c3baef49268..e9dbd224b2a0 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureToolCallBehavior.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/AzureOpenAIToolCallBehavior.cs
@@ -6,12 +6,12 @@
using System.Diagnostics;
using System.Linq;
using System.Text.Json;
-using Azure.AI.OpenAI;
+using OpenAI.Chat;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
/// <summary>Represents a behavior for Azure OpenAI tool calls.</summary>
-public abstract class AzureToolCallBehavior
+public abstract class AzureOpenAIToolCallBehavior
{
// NOTE: Right now, the only tools that are available are for function calling. In the future,
// this class can be extended to support additional kinds of tools, including composite ones:
@@ -45,7 +45,7 @@ public abstract class AzureToolCallBehavior
///
/// If no <see cref="Kernel"/> is available, no function information will be provided to the model.
///
- public static AzureToolCallBehavior EnableKernelFunctions { get; } = new KernelFunctions(autoInvoke: false);
+ public static AzureOpenAIToolCallBehavior EnableKernelFunctions { get; } = new KernelFunctions(autoInvoke: false);
///
/// Gets an instance that will both provide all of the <see cref="Kernel"/>'s plugins' function information
@@ -56,16 +56,16 @@ public abstract class AzureToolCallBehavior
/// handling invoking any requested functions and supplying the results back to the model.
/// If no <see cref="Kernel"/> is available, no function information will be provided to the model.
///
- public static AzureToolCallBehavior AutoInvokeKernelFunctions { get; } = new KernelFunctions(autoInvoke: true);
+ public static AzureOpenAIToolCallBehavior AutoInvokeKernelFunctions { get; } = new KernelFunctions(autoInvoke: true);
/// Gets an instance that will provide the specified list of functions to the model.
/// The functions that should be made available to the model.
/// true to attempt to automatically handle function call requests; otherwise, false.
///
- /// The <see cref="AzureToolCallBehavior"/> that may be set into <see cref="AzureOpenAIPromptExecutionSettings.ToolCallBehavior"/>
+ /// The <see cref="AzureOpenAIToolCallBehavior"/> that may be set into <see cref="AzureOpenAIPromptExecutionSettings.ToolCallBehavior"/>
/// to indicate that the specified functions should be made available to the model.
///
- public static AzureToolCallBehavior EnableFunctions(IEnumerable functions, bool autoInvoke = false)
+ public static AzureOpenAIToolCallBehavior EnableFunctions(IEnumerable functions, bool autoInvoke = false)
{
Verify.NotNull(functions);
return new EnabledFunctions(functions, autoInvoke);
@@ -75,17 +75,17 @@ public static AzureToolCallBehavior EnableFunctions(IEnumerableThe function the model should request to use.
/// true to attempt to automatically handle function call requests; otherwise, false.
///
- /// The <see cref="AzureToolCallBehavior"/> that may be set into <see cref="AzureOpenAIPromptExecutionSettings.ToolCallBehavior"/>
+ /// The <see cref="AzureOpenAIToolCallBehavior"/> that may be set into <see cref="AzureOpenAIPromptExecutionSettings.ToolCallBehavior"/>
/// to indicate that the specified function should be requested by the model.
///
- public static AzureToolCallBehavior RequireFunction(AzureOpenAIFunction function, bool autoInvoke = false)
+ public static AzureOpenAIToolCallBehavior RequireFunction(AzureOpenAIFunction function, bool autoInvoke = false)
{
Verify.NotNull(function);
return new RequiredFunction(function, autoInvoke);
}
/// Initializes the instance; prevents external instantiation.
- private AzureToolCallBehavior(bool autoInvoke)
+ private AzureOpenAIToolCallBehavior(bool autoInvoke)
{
this.MaximumAutoInvokeAttempts = autoInvoke ? DefaultMaximumAutoInvokeAttempts : 0;
}
@@ -118,23 +118,25 @@ private AzureToolCallBehavior(bool autoInvoke)
/// true if it's ok to invoke any kernel function requested by the model if it's found; false if a request needs to be validated against an allow list.
internal virtual bool AllowAnyRequestedKernelFunction => false;
- /// <summary>Configures the <see cref="ChatCompletionsOptions"/> with any tools this <see cref="AzureToolCallBehavior"/> provides.</summary>
- /// <param name="kernel">The <see cref="Kernel"/> used for the operation. This can be queried to determine what tools to provide into the <paramref name="options"/>.</param>
- /// <param name="options">The destination <see cref="ChatCompletionsOptions"/> to configure.</param>
- internal abstract void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options);
+ /// <summary>Returns list of available tools and the way model should use them.</summary>
+ /// <param name="kernel">The <see cref="Kernel"/> used for the operation. This can be queried to determine what tools to return.</param>
+ internal abstract (IList<ChatTool>? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel);
///
- /// Represents a <see cref="AzureToolCallBehavior"/> that will provide to the model all available functions from a
+ /// Represents a <see cref="AzureOpenAIToolCallBehavior"/> that will provide to the model all available functions from a
/// <see cref="Kernel"/> provided by the client. Setting this will have no effect if no <see cref="Kernel"/> is provided.
///
- internal sealed class KernelFunctions : AzureToolCallBehavior
+ internal sealed class KernelFunctions : AzureOpenAIToolCallBehavior
{
internal KernelFunctions(bool autoInvoke) : base(autoInvoke) { }
public override string ToString() => $"{nameof(KernelFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0})";
- internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options)
+ internal override (IList<ChatTool>? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel)
{
+ ChatToolChoice? choice = null;
+ List<ChatTool>? tools = null;
+
// If no kernel is provided, we don't have any tools to provide.
if (kernel is not null)
{
@@ -142,44 +144,50 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o
IList<KernelFunctionMetadata> functions = kernel.Plugins.GetFunctionsMetadata();
if (functions.Count > 0)
{
- options.ToolChoice = ChatCompletionsToolChoice.Auto;
+ choice = ChatToolChoice.Auto;
+ tools = [];
for (int i = 0; i < functions.Count; i++)
{
- options.Tools.Add(new ChatCompletionsFunctionToolDefinition(functions[i].ToAzureOpenAIFunction().ToFunctionDefinition()));
+ tools.Add(functions[i].ToAzureOpenAIFunction().ToFunctionDefinition());
}
}
}
+
+ return (tools, choice);
}
internal override bool AllowAnyRequestedKernelFunction => true;
}
///
- /// Represents a <see cref="AzureToolCallBehavior"/> that provides a specified list of functions to the model.
+ /// Represents a <see cref="AzureOpenAIToolCallBehavior"/> that provides a specified list of functions to the model.
///
- internal sealed class EnabledFunctions : AzureToolCallBehavior
+ internal sealed class EnabledFunctions : AzureOpenAIToolCallBehavior
{
private readonly AzureOpenAIFunction[] _openAIFunctions;
- private readonly ChatCompletionsFunctionToolDefinition[] _functions;
+ private readonly ChatTool[] _functions;
public EnabledFunctions(IEnumerable<AzureOpenAIFunction> functions, bool autoInvoke) : base(autoInvoke)
{
this._openAIFunctions = functions.ToArray();
- var defs = new ChatCompletionsFunctionToolDefinition[this._openAIFunctions.Length];
+ var defs = new ChatTool[this._openAIFunctions.Length];
for (int i = 0; i < defs.Length; i++)
{
- defs[i] = new ChatCompletionsFunctionToolDefinition(this._openAIFunctions[i].ToFunctionDefinition());
+ defs[i] = this._openAIFunctions[i].ToFunctionDefinition();
}
this._functions = defs;
}
- public override string ToString() => $"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.Name))}";
+ public override string ToString() => $"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.FunctionName))}";
- internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options)
+ internal override (IList<ChatTool>? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel)
{
+ ChatToolChoice? choice = null;
+ List<ChatTool>? tools = null;
+
AzureOpenAIFunction[] openAIFunctions = this._openAIFunctions;
- ChatCompletionsFunctionToolDefinition[] functions = this._functions;
+ ChatTool[] functions = this._functions;
Debug.Assert(openAIFunctions.Length == functions.Length);
if (openAIFunctions.Length > 0)
@@ -196,7 +204,8 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o
throw new KernelException($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.");
}
- options.ToolChoice = ChatCompletionsToolChoice.Auto;
+ choice = ChatToolChoice.Auto;
+ tools = [];
for (int i = 0; i < openAIFunctions.Length; i++)
{
// Make sure that if auto-invocation is specified, every enabled function can be found in the kernel.
@@ -211,29 +220,31 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o
}
// Add the function.
- options.Tools.Add(functions[i]);
+ tools.Add(functions[i]);
}
}
+
+ return (tools, choice);
}
}
- /// <summary>Represents a <see cref="AzureToolCallBehavior"/> that requests the model use a specific function.</summary>
- internal sealed class RequiredFunction : AzureToolCallBehavior
+ /// <summary>Represents a <see cref="AzureOpenAIToolCallBehavior"/> that requests the model use a specific function.</summary>
+ internal sealed class RequiredFunction : AzureOpenAIToolCallBehavior
{
private readonly AzureOpenAIFunction _function;
- private readonly ChatCompletionsFunctionToolDefinition _tool;
- private readonly ChatCompletionsToolChoice _choice;
+ private readonly ChatTool _tool;
+ private readonly ChatToolChoice _choice;
public RequiredFunction(AzureOpenAIFunction function, bool autoInvoke) : base(autoInvoke)
{
this._function = function;
- this._tool = new ChatCompletionsFunctionToolDefinition(function.ToFunctionDefinition());
- this._choice = new ChatCompletionsToolChoice(this._tool);
+ this._tool = function.ToFunctionDefinition();
+ this._choice = new ChatToolChoice(this._tool);
}
- public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.Name}";
+ public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.FunctionName}";
- internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options)
+ internal override (IList<ChatTool>? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel)
{
bool autoInvoke = base.MaximumAutoInvokeAttempts > 0;
@@ -253,8 +264,7 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o
throw new KernelException($"The specified {nameof(RequiredFunction)} function {this._function.FullyQualifiedName} is not available in the kernel.");
}
- options.ToolChoice = this._choice;
- options.Tools.Add(this._tool);
+ return ([this._tool], this._choice);
}
/// <summary>Gets how many requests within a single interaction should include this tool in the request.</summary>
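Since ConfigureOptions no longer mutates a ChatCompletionsOptions, the caller applies the tuple itself. A sketch of the consuming side (ChatCompletionOptions.Tools and ToolChoice are OpenAI SDK members; the surrounding code is illustrative, not this PR's ClientCore):

    var (tools, choice) = behavior.ConfigureOptions(kernel);
    var options = new ChatCompletionOptions();
    if (choice is not null)
    {
        options.ToolChoice = choice;
    }
    if (tools is not null)
    {
        foreach (ChatTool tool in tools)
        {
            options.Tools.Add(tool);
        }
    }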
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs
index e478a301d947..9d771c4f7abb 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs
@@ -10,6 +10,7 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Services;
using Microsoft.SemanticKernel.TextGeneration;
+using OpenAI;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -73,7 +74,7 @@ public AzureOpenAIChatCompletionService(
/// The to use for logging. If null, no logging will be performed.
public AzureOpenAIChatCompletionService(
string deploymentName,
- OpenAIClient openAIClient,
+ AzureOpenAIClient openAIClient,
string? modelId = null,
ILoggerFactory? loggerFactory = null)
{
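A construction sketch under the new signature (endpoint and key are placeholders; ApiKeyCredential is the System.ClientModel credential type, assumed here):

    var client = new AzureOpenAIClient(
        new Uri("https://contoso.openai.azure.com/"),
        new ApiKeyCredential("api-key"));
    var service = new AzureOpenAIChatCompletionService("test-deployment", client);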
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/RequestFailedExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/ClientResultExceptionExtensions.cs
similarity index 78%
rename from dotnet/src/Connectors/Connectors.AzureOpenAI/RequestFailedExceptionExtensions.cs
rename to dotnet/src/Connectors/Connectors.AzureOpenAI/ClientResultExceptionExtensions.cs
index 3857d0191fbe..fd282797e879 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/RequestFailedExceptionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/ClientResultExceptionExtensions.cs
@@ -1,21 +1,22 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.ClientModel;
using System.Net;
using Azure;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
///
-/// Provides extension methods for the <see cref="RequestFailedException"/> class.
+/// Provides extension methods for the <see cref="ClientResultException"/> class.
///
-internal static class RequestFailedExceptionExtensions
+internal static class ClientResultExceptionExtensions
{
///
- /// Converts a <see cref="RequestFailedException"/> to an <see cref="HttpOperationException"/>.
+ /// Converts a <see cref="ClientResultException"/> to an <see cref="HttpOperationException"/>.
///
/// <param name="exception">The original <see cref="ClientResultException"/>.</param>
/// <returns>An <see cref="HttpOperationException"/> instance.</returns>
- public static HttpOperationException ToHttpOperationException(this RequestFailedException exception)
+ public static HttpOperationException ToHttpOperationException(this ClientResultException exception)
{
const int NoResponseReceived = 0;
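Call sites change only in the exception type they catch. An illustrative catch-site (the client call is a placeholder):

    try
    {
        // ... invoke the OpenAI v2 client here ...
    }
    catch (ClientResultException e)
    {
        throw e.ToHttpOperationException();
    }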
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
index 8e8f53594708..35c31788610d 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj
@@ -13,6 +13,7 @@
+
@@ -25,7 +26,7 @@
-
+
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs
index 8cbecc909951..ff7183cb0b12 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIChatMessageContent.cs
@@ -2,8 +2,9 @@
using System.Collections.Generic;
using System.Linq;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Chat;
+using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -13,28 +14,28 @@ namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
public sealed class AzureOpenAIChatMessageContent : ChatMessageContent
{
///
- /// Gets the metadata key for the name property.
+ /// Gets the metadata key for the tool id.
///
- public static string ToolIdProperty => $"{nameof(ChatCompletionsToolCall)}.{nameof(ChatCompletionsToolCall.Id)}";
+ public static string ToolIdProperty => "ChatCompletionsToolCall.Id";
///
- /// Gets the metadata key for the list of <see cref="ChatCompletionsFunctionToolCall"/>.
+ /// Gets the metadata key for the list of <see cref="ChatToolCall"/>.
///
- internal static string FunctionToolCallsProperty => $"{nameof(ChatResponseMessage)}.FunctionToolCalls";
+ internal static string FunctionToolCallsProperty => "ChatResponseMessage.FunctionToolCalls";
///
/// Initializes a new instance of the class.
///
- internal AzureOpenAIChatMessageContent(ChatResponseMessage chatMessage, string modelId, IReadOnlyDictionary<string, object?>? metadata = null)
- : base(new AuthorRole(chatMessage.Role.ToString()), chatMessage.Content, modelId, chatMessage, System.Text.Encoding.UTF8, CreateMetadataDictionary(chatMessage.ToolCalls, metadata))
+ internal AzureOpenAIChatMessageContent(OpenAIChatCompletion completion, string modelId, IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(new AuthorRole(completion.Role.ToString()), CreateContentItems(completion.Content), modelId, completion, System.Text.Encoding.UTF8, CreateMetadataDictionary(completion.ToolCalls, metadata))
{
- this.ToolCalls = chatMessage.ToolCalls;
+ this.ToolCalls = completion.ToolCalls;
}
///
/// Initializes a new instance of the class.
///
- internal AzureOpenAIChatMessageContent(ChatRole role, string? content, string modelId, IReadOnlyList<ChatCompletionsToolCall> toolCalls, IReadOnlyDictionary<string, object?>? metadata = null)
+ internal AzureOpenAIChatMessageContent(ChatMessageRole role, string? content, string modelId, IReadOnlyList<ChatToolCall> toolCalls, IReadOnlyDictionary<string, object?>? metadata = null)
: base(new AuthorRole(role.ToString()), content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata))
{
this.ToolCalls = toolCalls;
@@ -43,16 +44,32 @@ internal AzureOpenAIChatMessageContent(ChatRole role, string? content, string mo
///
/// Initializes a new instance of the class.
///
- internal AzureOpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList<ChatCompletionsToolCall> toolCalls, IReadOnlyDictionary<string, object?>? metadata = null)
+ internal AzureOpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList<ChatToolCall> toolCalls, IReadOnlyDictionary<string, object?>? metadata = null)
: base(role, content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata))
{
this.ToolCalls = toolCalls;
}
+ private static ChatMessageContentItemCollection CreateContentItems(IReadOnlyList<ChatMessageContentPart> contentUpdate)
+ {
+ ChatMessageContentItemCollection collection = [];
+
+ foreach (var part in contentUpdate)
+ {
+ // We only support text content for now.
+ if (part.Kind == ChatMessageContentPartKind.Text)
+ {
+ collection.Add(new TextContent(part.Text));
+ }
+ }
+
+ return collection;
+ }
+
///
/// A list of the tools called by the model.
///
- public IReadOnlyList<ChatCompletionsToolCall> ToolCalls { get; }
+ public IReadOnlyList<ChatToolCall> ToolCalls { get; }
///
/// Retrieve the resulting function from the chat result.
@@ -64,7 +81,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls()
foreach (var toolCall in this.ToolCalls)
{
- if (toolCall is ChatCompletionsFunctionToolCall functionToolCall)
+ if (toolCall is ChatToolCall functionToolCall)
{
(functionToolCallList ??= []).Add(new AzureOpenAIFunctionToolCall(functionToolCall));
}
@@ -79,7 +96,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls()
}
private static IReadOnlyDictionary<string, object?>? CreateMetadataDictionary(
- IReadOnlyList<ChatCompletionsToolCall> toolCalls,
+ IReadOnlyList<ChatToolCall> toolCalls,
IReadOnlyDictionary? original)
{
// We only need to augment the metadata if there are any tool calls.
@@ -107,7 +124,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls()
}
// Add the additional entry.
- newDictionary.Add(FunctionToolCallsProperty, toolCalls.OfType<ChatCompletionsFunctionToolCall>().ToList());
+ newDictionary.Add(FunctionToolCallsProperty, toolCalls.Where(ctc => ctc.Kind == ChatToolCallKind.Function).ToList());
return newDictionary;
}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs
index e34b191a83b8..c37321e48c4d 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIClientCore.cs
@@ -2,7 +2,6 @@
using System;
using System.Net.Http;
-using Azure;
using Azure.AI.OpenAI;
using Azure.Core;
using Microsoft.Extensions.Logging;
@@ -23,7 +22,7 @@ internal sealed class AzureOpenAIClientCore : ClientCore
///
/// OpenAI / Azure OpenAI Client
///
- internal override OpenAIClient Client { get; }
+ internal override AzureOpenAIClient Client { get; }
///
/// Initializes a new instance of the class using API Key authentication.
@@ -49,7 +48,7 @@ internal AzureOpenAIClientCore(
this.DeploymentOrModelName = deploymentName;
this.Endpoint = new Uri(endpoint);
- this.Client = new OpenAIClient(this.Endpoint, new AzureKeyCredential(apiKey), options);
+ this.Client = new AzureOpenAIClient(this.Endpoint, apiKey, options);
}
///
@@ -75,7 +74,7 @@ internal AzureOpenAIClientCore(
this.DeploymentOrModelName = deploymentName;
this.Endpoint = new Uri(endpoint);
- this.Client = new OpenAIClient(this.Endpoint, credential, options);
+ this.Client = new AzureOpenAIClient(this.Endpoint, credential, options);
}
///
@@ -84,11 +83,11 @@ internal AzureOpenAIClientCore(
/// it's up to the caller to configure the client.
///
/// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
- /// <param name="openAIClient">Custom <see cref="OpenAIClient"/>.</param>
+ /// <param name="openAIClient">Custom <see cref="AzureOpenAIClient"/>.</param>
/// The to use for logging. If null, no logging will be performed.
internal AzureOpenAIClientCore(
string deploymentName,
- OpenAIClient openAIClient,
+ AzureOpenAIClient openAIClient,
ILogger? logger = null) : base(logger)
{
Verify.NotNullOrWhiteSpace(deploymentName);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs
index 4a3cff49103d..0089b6c29041 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunction.cs
@@ -2,7 +2,7 @@
using System;
using System.Collections.Generic;
-using Azure.AI.OpenAI;
+using OpenAI.Chat;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -124,10 +124,10 @@ internal AzureOpenAIFunction(
///
/// Converts the <see cref="AzureOpenAIFunction"/> representation to the Azure SDK's
- /// <see cref="FunctionDefinition"/> representation.
+ /// <see cref="ChatTool"/> representation.
/// </summary>
- /// <returns>A <see cref="FunctionDefinition"/> containing all the function information.</returns>
- public FunctionDefinition ToFunctionDefinition()
+ /// <returns>A <see cref="ChatTool"/> containing all the function information.</returns>
+ public ChatTool ToFunctionDefinition()
{
BinaryData resultParameters = s_zeroFunctionParametersSchema;
@@ -155,12 +155,12 @@ public FunctionDefinition ToFunctionDefinition()
});
}
- return new FunctionDefinition
- {
- Name = this.FullyQualifiedName,
- Description = this.Description,
- Parameters = resultParameters,
- };
+ return ChatTool.CreateFunctionTool
+ (
+ functionName: this.FullyQualifiedName,
+ functionDescription: this.Description,
+ functionParameters: resultParameters
+ );
}
/// <summary>Gets a <see cref="KernelJsonSchema"/> for a typeless parameter with the specified description, defaulting to typeof(string)</summary>
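For comparison, the factory produces the same schema payload the object initializer used to carry; a standalone sketch (names and schema are illustrative):

    ChatTool tool = ChatTool.CreateFunctionTool(
        functionName: "myplugin-myfunc",
        functionDescription: "This is a description of the function.",
        functionParameters: BinaryData.FromString(
            """{"type":"object","required":[],"properties":{}}"""));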
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs
index bea73a474d37..e618f27a9b15 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIFunctionToolCall.cs
@@ -5,7 +5,7 @@
using System.Diagnostics;
using System.Text;
using System.Text.Json;
-using Azure.AI.OpenAI;
+using OpenAI.Chat;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -16,15 +16,15 @@ public sealed class AzureOpenAIFunctionToolCall
{
private string? _fullyQualifiedFunctionName;
- /// <summary>Initialize the <see cref="AzureOpenAIFunctionToolCall"/> from a <see cref="ChatCompletionsFunctionToolCall"/>.</summary>
- internal AzureOpenAIFunctionToolCall(ChatCompletionsFunctionToolCall functionToolCall)
+ /// <summary>Initialize the <see cref="AzureOpenAIFunctionToolCall"/> from a <see cref="ChatToolCall"/>.</summary>
+ internal AzureOpenAIFunctionToolCall(ChatToolCall functionToolCall)
{
Verify.NotNull(functionToolCall);
- Verify.NotNull(functionToolCall.Name);
+ Verify.NotNull(functionToolCall.FunctionName);
- string fullyQualifiedFunctionName = functionToolCall.Name;
+ string fullyQualifiedFunctionName = functionToolCall.FunctionName;
string functionName = fullyQualifiedFunctionName;
- string? arguments = functionToolCall.Arguments;
+ string? arguments = functionToolCall.FunctionArguments;
string? pluginName = null;
int separatorPos = fullyQualifiedFunctionName.IndexOf(AzureOpenAIFunction.NameSeparator, StringComparison.Ordinal);
@@ -89,43 +89,43 @@ public override string ToString()
///
/// Tracks tooling updates from streaming responses.
///
- /// <param name="update">The tool call update to incorporate.</param>
+ /// <param name="updates">The tool call updates to incorporate.</param>
/// <param name="toolCallIdsByIndex">Lazily-initialized dictionary mapping indices to IDs.</param>
/// <param name="functionNamesByIndex">Lazily-initialized dictionary mapping indices to names.</param>
/// <param name="functionArgumentBuildersByIndex">Lazily-initialized dictionary mapping indices to arguments.</param>
internal static void TrackStreamingToolingUpdate(
- StreamingToolCallUpdate? update,
+ IReadOnlyList<StreamingChatToolCallUpdate>? updates,
ref Dictionary<int, string>? toolCallIdsByIndex,
ref Dictionary<int, string>? functionNamesByIndex,
ref Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex)
{
- if (update is null)
+ if (updates is null)
{
// Nothing to track.
return;
}
- // If we have an ID, ensure the index is being tracked. Even if it's not a function update,
- // we want to keep track of it so we can send back an error.
- if (update.Id is string id)
+ foreach (var update in updates)
{
- (toolCallIdsByIndex ??= [])[update.ToolCallIndex] = id;
- }
+ // If we have an ID, ensure the index is being tracked. Even if it's not a function update,
+ // we want to keep track of it so we can send back an error.
+ if (update.Id is string id)
+ {
+ (toolCallIdsByIndex ??= [])[update.Index] = id;
+ }
- if (update is StreamingFunctionToolCallUpdate ftc)
- {
// Ensure we're tracking the function's name.
- if (ftc.Name is string name)
+ if (update.FunctionName is string name)
{
- (functionNamesByIndex ??= [])[ftc.ToolCallIndex] = name;
+ (functionNamesByIndex ??= [])[update.Index] = name;
}
// Ensure we're tracking the function's arguments.
- if (ftc.ArgumentsUpdate is string argumentsUpdate)
+ if (update.FunctionArgumentsUpdate is string argumentsUpdate)
{
- if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(ftc.ToolCallIndex, out StringBuilder? arguments))
+ if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(update.Index, out StringBuilder? arguments))
{
- functionArgumentBuildersByIndex[ftc.ToolCallIndex] = arguments = new();
+ functionArgumentBuildersByIndex[update.Index] = arguments = new();
}
arguments.Append(argumentsUpdate);
@@ -134,20 +134,20 @@ internal static void TrackStreamingToolingUpdate(
}
///
- /// Converts the data built up by <see cref="TrackStreamingToolingUpdate"/> into an array of <see cref="ChatCompletionsFunctionToolCall"/>s.
+ /// Converts the data built up by <see cref="TrackStreamingToolingUpdate"/> into an array of <see cref="ChatToolCall"/>s.
///
/// <param name="toolCallIdsByIndex">Dictionary mapping indices to IDs.</param>
/// <param name="functionNamesByIndex">Dictionary mapping indices to names.</param>
/// <param name="functionArgumentBuildersByIndex">Dictionary mapping indices to arguments.</param>
- internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(
+ internal static ChatToolCall[] ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(
ref Dictionary<int, string>? toolCallIdsByIndex,
ref Dictionary<int, string>? functionNamesByIndex,
ref Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex)
{
- ChatCompletionsFunctionToolCall[] toolCalls = [];
+ ChatToolCall[] toolCalls = [];
if (toolCallIdsByIndex is { Count: > 0 })
{
- toolCalls = new ChatCompletionsFunctionToolCall[toolCallIdsByIndex.Count];
+ toolCalls = new ChatToolCall[toolCallIdsByIndex.Count];
int i = 0;
foreach (KeyValuePair<int, string> toolCallIndexAndId in toolCallIdsByIndex)
@@ -158,7 +158,7 @@ internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCo
functionNamesByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionName);
functionArgumentBuildersByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionArguments);
- toolCalls[i] = new ChatCompletionsFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? string.Empty);
+ toolCalls[i] = ChatToolCall.CreateFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? string.Empty);
i++;
}
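Once the streamed chunks are accumulated, each complete call is rebuilt with the SDK factory, mirroring the line above (values illustrative):

    ChatToolCall call = ChatToolCall.CreateFunctionToolCall(
        "call_abc123",             // id accumulated from update.Id
        "myplugin-myfunc",         // name accumulated from update.FunctionName
        "{\"param1\":\"value\"}"); // concatenated FunctionArgumentsUpdate chunks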
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs
index c667183f773c..c903127089dd 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIPluginCollectionExtensions.cs
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Diagnostics.CodeAnalysis;
-using Azure.AI.OpenAI;
+using OpenAI.Chat;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -20,7 +20,7 @@ public static class AzureOpenAIPluginCollectionExtensions
/// <returns><see langword="true"/> if the function was found; otherwise, <see langword="false"/>.</returns>
public static bool TryGetFunctionAndArguments(
this IReadOnlyKernelPluginCollection plugins,
- ChatCompletionsFunctionToolCall functionToolCall,
+ ChatToolCall functionToolCall,
[NotNullWhen(true)] out KernelFunction? function,
out KernelArguments? arguments) =>
plugins.TryGetFunctionAndArguments(new AzureOpenAIFunctionToolCall(functionToolCall), out function, out arguments);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs
index c1843b185f89..9287499e1621 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureOpenAIStreamingChatMessageContent.cs
@@ -2,8 +2,8 @@
using System.Collections.Generic;
using System.Text;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Chat;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -18,7 +18,7 @@ public sealed class AzureOpenAIStreamingChatMessageContent : StreamingChatMessag
///
/// The reason why the completion finished.
///
- public CompletionsFinishReason? FinishReason { get; set; }
+ public ChatFinishReason? FinishReason { get; set; }
///
/// Create a new instance of the class.
@@ -28,21 +28,22 @@ public sealed class AzureOpenAIStreamingChatMessageContent : StreamingChatMessag
/// The model ID used to generate the content
/// Additional metadata
internal AzureOpenAIStreamingChatMessageContent(
- StreamingChatCompletionsUpdate chatUpdate,
+ StreamingChatCompletionUpdate chatUpdate,
int choiceIndex,
string modelId,
IReadOnlyDictionary<string, object?>? metadata = null)
: base(
chatUpdate.Role.HasValue ? new AuthorRole(chatUpdate.Role.Value.ToString()) : null,
- chatUpdate.ContentUpdate,
+ null,
chatUpdate,
choiceIndex,
modelId,
Encoding.UTF8,
metadata)
{
- this.ToolCallUpdate = chatUpdate.ToolCallUpdate;
- this.FinishReason = chatUpdate?.FinishReason;
+ this.ToolCallUpdate = chatUpdate.ToolCallUpdates;
+ this.FinishReason = chatUpdate.FinishReason;
+ this.Items = CreateContentItems(chatUpdate.ContentUpdate);
}
///
@@ -58,8 +59,8 @@ internal AzureOpenAIStreamingChatMessageContent(
internal AzureOpenAIStreamingChatMessageContent(
AuthorRole? authorRole,
string? content,
- StreamingToolCallUpdate? tootToolCallUpdate = null,
- CompletionsFinishReason? completionsFinishReason = null,
+ IReadOnlyList<StreamingChatToolCallUpdate>? tootToolCallUpdate = null,
+ ChatFinishReason? completionsFinishReason = null,
int choiceIndex = 0,
string? modelId = null,
IReadOnlyDictionary<string, object?>? metadata = null)
@@ -77,11 +78,27 @@ internal AzureOpenAIStreamingChatMessageContent(
}
/// Gets any update information in the message about a tool call.
- public StreamingToolCallUpdate? ToolCallUpdate { get; }
+ public IReadOnlyList<StreamingChatToolCallUpdate>? ToolCallUpdate { get; }
///
public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString());
///
public override string ToString() => this.Content ?? string.Empty;
+
+ private static StreamingKernelContentItemCollection CreateContentItems(IReadOnlyList<ChatMessageContentPart> contentUpdate)
+ {
+ StreamingKernelContentItemCollection collection = [];
+
+ foreach (var content in contentUpdate)
+ {
+ // We only support text content for now.
+ if (content.Kind == ChatMessageContentPartKind.Text)
+ {
+ collection.Add(new StreamingTextContent(content.Text));
+ }
+ }
+
+ return collection;
+ }
}
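Because text now arrives as content items rather than a raw string, consumers read Items. A sketch (service and history names are illustrative; OfType requires System.Linq):

    await foreach (var update in service.GetStreamingChatMessageContentsAsync(chatHistory))
    {
        foreach (var text in update.Items.OfType<StreamingTextContent>())
        {
            Console.Write(text.Text);
        }
    }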
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
index dda7578da8ea..6486d7348144 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
@@ -1,6 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Metrics;
@@ -11,15 +13,17 @@
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
-using Azure;
using Azure.AI.OpenAI;
-using Azure.Core;
-using Azure.Core.Pipeline;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Diagnostics;
using Microsoft.SemanticKernel.Http;
+using OpenAI;
+using OpenAI.Audio;
+using OpenAI.Chat;
+using OpenAI.Embeddings;
+using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion;
#pragma warning disable CA2208 // Instantiate argument exceptions correctly
@@ -30,8 +34,11 @@ namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
///
internal abstract class ClientCore
{
+ private const string PromptFilterResultsMetadataKey = "PromptFilterResults";
+ private const string ContentFilterResultsMetadataKey = "ContentFilterResults";
+ private const string LogProbabilityInfoMetadataKey = "LogProbabilityInfo";
private const string ModelProvider = "openai";
- private const int MaxResultsPerPrompt = 128;
+ private record ToolCallingConfig(IList<ChatTool>? Tools, ChatToolChoice Choice, bool AutoInvoke);
///
/// The maximum number of auto-invokes that can be in-flight at any given time as part of the current
@@ -52,7 +59,7 @@ internal abstract class ClientCore
private const int MaxInflightAutoInvokes = 128;
/// Singleton tool used when tool call count drops to 0 but we need to supply tools to keep the service happy.
- private static readonly ChatCompletionsFunctionToolDefinition s_nonInvocableFunctionTool = new() { Name = "NonInvocableTool" };
+ private static readonly ChatTool s_nonInvocableFunctionTool = ChatTool.CreateFunctionTool("NonInvocableTool");
/// <summary>Tracking <see cref="AsyncLocal{Int32}"/> for <see cref="MaxInflightAutoInvokes"/>.</summary>
private static readonly AsyncLocal<int> s_inflightAutoInvokes = new();
@@ -70,7 +77,7 @@ internal ClientCore(ILogger? logger = null)
///
/// OpenAI / Azure OpenAI Client
///
- internal abstract OpenAIClient Client { get; }
+ internal abstract AzureOpenAIClient Client { get; }
internal Uri? Endpoint { get; set; } = null;
@@ -116,171 +123,35 @@ internal ClientCore(ILogger? logger = null)
unit: "{token}",
description: "Number of tokens used");
- ///
- /// Creates completions for the prompt and settings.
- ///
- /// The prompt to complete.
- /// Execution settings for the completion API.
- /// The containing services, plugins, and other state for use throughout the operation.
- /// The to monitor for cancellation requests. The default is .
- /// Completions generated by the remote model
- internal async Task<IReadOnlyList<TextContent>> GetTextResultsAsync(
- string prompt,
- PromptExecutionSettings? executionSettings,
- Kernel? kernel,
- CancellationToken cancellationToken = default)
- {
- AzureOpenAIPromptExecutionSettings textExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, AzureOpenAIPromptExecutionSettings.DefaultTextMaxTokens);
-
- ValidateMaxTokens(textExecutionSettings.MaxTokens);
-
- var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName);
-
- Completions? responseData = null;
- List<TextContent> responseContent;
- using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings))
- {
- try
- {
- responseData = (await RunRequestAsync(() => this.Client.GetCompletionsAsync(options, cancellationToken)).ConfigureAwait(false)).Value;
- if (responseData.Choices.Count == 0)
- {
- throw new KernelException("Text completions not found");
- }
- }
- catch (Exception ex) when (activity is not null)
- {
- activity.SetError(ex);
- if (responseData != null)
- {
- // Capture available metadata even if the operation failed.
- activity
- .SetResponseId(responseData.Id)
- .SetPromptTokenUsage(responseData.Usage.PromptTokens)
- .SetCompletionTokenUsage(responseData.Usage.CompletionTokens);
- }
- throw;
- }
-
- responseContent = responseData.Choices.Select(choice => new TextContent(choice.Text, this.DeploymentOrModelName, choice, Encoding.UTF8, GetTextChoiceMetadata(responseData, choice))).ToList();
- activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens);
- }
-
- this.LogUsage(responseData.Usage);
-
- return responseContent;
- }
-
- internal async IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(
- string prompt,
- PromptExecutionSettings? executionSettings,
- Kernel? kernel,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- AzureOpenAIPromptExecutionSettings textExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, AzureOpenAIPromptExecutionSettings.DefaultTextMaxTokens);
-
- ValidateMaxTokens(textExecutionSettings.MaxTokens);
-
- var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName);
-
- using var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings);
-
- StreamingResponse<Completions> response;
- try
- {
- response = await RunRequestAsync(() => this.Client.GetCompletionsStreamingAsync(options, cancellationToken)).ConfigureAwait(false);
- }
- catch (Exception ex) when (activity is not null)
- {
- activity.SetError(ex);
- throw;
- }
-
- var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator();
- List<StreamingTextContent>? streamedContents = activity is not null ? [] : null;
- try
- {
- while (true)
- {
- try
- {
- if (!await responseEnumerator.MoveNextAsync())
- {
- break;
- }
- }
- catch (Exception ex) when (activity is not null)
- {
- activity.SetError(ex);
- throw;
- }
-
- Completions completions = responseEnumerator.Current;
- foreach (Choice choice in completions.Choices)
- {
- var openAIStreamingTextContent = new AzureOpenAIStreamingTextContent(
- choice.Text, choice.Index, this.DeploymentOrModelName, choice, GetTextChoiceMetadata(completions, choice));
- streamedContents?.Add(openAIStreamingTextContent);
- yield return openAIStreamingTextContent;
- }
- }
- }
- finally
- {
- activity?.EndStreaming(streamedContents);
- await responseEnumerator.DisposeAsync();
- }
- }
-
- private static Dictionary<string, object?> GetTextChoiceMetadata(Completions completions, Choice choice)
- {
- return new Dictionary<string, object?>(8)
- {
- { nameof(completions.Id), completions.Id },
- { nameof(completions.Created), completions.Created },
- { nameof(completions.PromptFilterResults), completions.PromptFilterResults },
- { nameof(completions.Usage), completions.Usage },
- { nameof(choice.ContentFilterResults), choice.ContentFilterResults },
-
- // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
- { nameof(choice.FinishReason), choice.FinishReason?.ToString() },
-
- { nameof(choice.LogProbabilityModel), choice.LogProbabilityModel },
- { nameof(choice.Index), choice.Index },
- };
- }
-
- private static Dictionary<string, object?> GetChatChoiceMetadata(ChatCompletions completions, ChatChoice chatChoice)
+ private static Dictionary<string, object?> GetChatChoiceMetadata(OpenAIChatCompletion completions)
{
+#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
return new Dictionary<string, object?>(12)
{
{ nameof(completions.Id), completions.Id },
- { nameof(completions.Created), completions.Created },
- { nameof(completions.PromptFilterResults), completions.PromptFilterResults },
+ { nameof(completions.CreatedAt), completions.CreatedAt },
+ { PromptFilterResultsMetadataKey, completions.GetContentFilterResultForPrompt() },
{ nameof(completions.SystemFingerprint), completions.SystemFingerprint },
{ nameof(completions.Usage), completions.Usage },
- { nameof(chatChoice.ContentFilterResults), chatChoice.ContentFilterResults },
+ { ContentFilterResultsMetadataKey, completions.GetContentFilterResultForResponse() },
// Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
- { nameof(chatChoice.FinishReason), chatChoice.FinishReason?.ToString() },
-
- { nameof(chatChoice.FinishDetails), chatChoice.FinishDetails },
- { nameof(chatChoice.LogProbabilityInfo), chatChoice.LogProbabilityInfo },
- { nameof(chatChoice.Index), chatChoice.Index },
- { nameof(chatChoice.Enhancements), chatChoice.Enhancements },
+ { nameof(completions.FinishReason), completions.FinishReason.ToString() },
+ { LogProbabilityInfoMetadataKey, completions.ContentTokenLogProbabilities },
};
+#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
}
- private static Dictionary<string, object?> GetResponseMetadata(StreamingChatCompletionsUpdate completions)
+ private static Dictionary<string, object?> GetResponseMetadata(StreamingChatCompletionUpdate completionUpdate)
{
return new Dictionary<string, object?>(4)
{
- { nameof(completions.Id), completions.Id },
- { nameof(completions.Created), completions.Created },
- { nameof(completions.SystemFingerprint), completions.SystemFingerprint },
+ { nameof(completionUpdate.Id), completionUpdate.Id },
+ { nameof(completionUpdate.CreatedAt), completionUpdate.CreatedAt },
+ { nameof(completionUpdate.SystemFingerprint), completionUpdate.SystemFingerprint },
// Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
- { nameof(completions.FinishReason), completions.FinishReason?.ToString() },
+ { nameof(completionUpdate.FinishReason), completionUpdate.FinishReason?.ToString() },
};
}
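These dictionaries surface downstream as ChatMessageContent.Metadata. A hedged sketch of how a consumer might read an entry back; the message variable is assumed, and the key matches the nameof() entries above:

    // 'message' is assumed to be a ChatMessageContent produced by this connector.
    if (message.Metadata is { } metadata &&
        metadata.TryGetValue("FinishReason", out object? finishReason))
    {
        // FinishReason is stored as a string (see the serialization comment above).
        Console.WriteLine($"Finish reason: {finishReason}");
    }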
@@ -312,13 +183,13 @@ internal async Task<IList<ReadOnlyMemory<float>>> GetEmbeddingsAsync(
if (data.Count > 0)
{
- var embeddingsOptions = new EmbeddingsOptions(this.DeploymentOrModelName, data)
+ var embeddingsOptions = new EmbeddingGenerationOptions()
{
Dimensions = dimensions
};
- var response = await RunRequestAsync(() => this.Client.GetEmbeddingsAsync(embeddingsOptions, cancellationToken)).ConfigureAwait(false);
- var embeddings = response.Value.Data;
+ var response = await RunRequestAsync(() => this.Client.GetEmbeddingClient(this.DeploymentOrModelName).GenerateEmbeddingsAsync(data, embeddingsOptions, cancellationToken)).ConfigureAwait(false);
+ var embeddings = response.Value;
if (embeddings.Count != data.Count)
{
@@ -327,7 +198,7 @@ internal async Task<IList<ReadOnlyMemory<float>>> GetEmbeddingsAsync(
for (var i = 0; i < embeddings.Count; i++)
{
- result.Add(embeddings[i].Embedding);
+ result.Add(embeddings[i].Vector);
}
}
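For reference, a minimal sketch of the embedding call path this hunk switches to, assuming an AzureOpenAIClient named client and a deployment name; the input text and dimension count are placeholders:

    using Azure.AI.OpenAI;
    using OpenAI.Embeddings;

    EmbeddingClient embeddingClient = client.GetEmbeddingClient(deployment);
    EmbeddingGenerationOptions options = new() { Dimensions = 1536 }; // placeholder dimension count
    var response = await embeddingClient.GenerateEmbeddingsAsync(new[] { "sample text" }, options);
    ReadOnlyMemory<float> vector = response.Value[0].Vector; // per-item vector, matching the loop above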
@@ -382,30 +253,36 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
{
Verify.NotNull(chat);
+ if (this.Logger.IsEnabled(LogLevel.Trace))
+ {
+ this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}",
+ JsonSerializer.Serialize(chat),
+ JsonSerializer.Serialize(executionSettings));
+ }
+
// Convert the incoming execution settings to OpenAI settings.
AzureOpenAIPromptExecutionSettings chatExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
- bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes;
+
ValidateMaxTokens(chatExecutionSettings.MaxTokens);
- ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt);
- // Create the Azure SDK ChatCompletionOptions instance from all available information.
- var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName);
+ var chatMessages = CreateChatCompletionMessages(chatExecutionSettings, chat);
- for (int requestIndex = 1; ; requestIndex++)
+ for (int requestIndex = 0; ; requestIndex++)
{
+ var toolCallingConfig = this.GetToolCallingConfiguration(kernel, chatExecutionSettings, requestIndex);
+
+ var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, toolCallingConfig, kernel);
+
// Make the request.
- ChatCompletions? responseData = null;
- List<ChatMessageContent> responseContent;
+ OpenAIChatCompletion? responseData = null;
+ AzureOpenAIChatMessageContent responseContent;
using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings))
{
try
{
- responseData = (await RunRequestAsync(() => this.Client.GetChatCompletionsAsync(chatOptions, cancellationToken)).ConfigureAwait(false)).Value;
+ responseData = (await RunRequestAsync(() => this.Client.GetChatClient(this.DeploymentOrModelName).CompleteChatAsync(chatMessages, chatOptions, cancellationToken)).ConfigureAwait(false)).Value;
+
this.LogUsage(responseData.Usage);
- if (responseData.Choices.Count == 0)
- {
- throw new KernelException("Chat completions not found");
- }
}
catch (Exception ex) when (activity is not null)
{
@@ -415,21 +292,20 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
// Capture available metadata even if the operation failed.
activity
.SetResponseId(responseData.Id)
- .SetPromptTokenUsage(responseData.Usage.PromptTokens)
- .SetCompletionTokenUsage(responseData.Usage.CompletionTokens);
+ .SetPromptTokenUsage(responseData.Usage.InputTokens)
+ .SetCompletionTokenUsage(responseData.Usage.OutputTokens);
}
throw;
}
- responseContent = responseData.Choices.Select(chatChoice => this.GetChatMessage(chatChoice, responseData)).ToList();
- activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens);
+ responseContent = this.GetChatMessage(responseData);
+ activity?.SetCompletionResponse([responseContent], responseData.Usage.InputTokens, responseData.Usage.OutputTokens);
}
// If we don't want to attempt to invoke any functions, just return the result.
- // Or if we are auto-invoking but we somehow end up with other than 1 choice even though only 1 was requested, similarly bail.
- if (!autoInvoke || responseData.Choices.Count != 1)
+ if (!toolCallingConfig.AutoInvoke)
{
- return responseContent;
+ return [responseContent];
}
Debug.Assert(kernel is not null);
@@ -439,51 +315,49 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
// Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service
// may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool
// is specified.
- ChatChoice resultChoice = responseData.Choices[0];
- AzureOpenAIChatMessageContent result = this.GetChatMessage(resultChoice, responseData);
- if (result.ToolCalls.Count == 0)
+ if (responseData.ToolCalls.Count == 0)
{
- return [result];
+ return [responseContent];
}
if (this.Logger.IsEnabled(LogLevel.Debug))
{
- this.Logger.LogDebug("Tool requests: {Requests}", result.ToolCalls.Count);
+ this.Logger.LogDebug("Tool requests: {Requests}", responseData.ToolCalls.Count);
}
if (this.Logger.IsEnabled(LogLevel.Trace))
{
- this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", result.ToolCalls.OfType<ChatCompletionsFunctionToolCall>().Select(ftc => $"{ftc.Name}({ftc.Arguments})")));
+ this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", responseData.ToolCalls.OfType<ChatToolCall>().Select(ftc => $"{ftc.FunctionName}({ftc.FunctionArguments})")));
}
- // Add the original assistant message to the chatOptions; this is required for the service
+ // Add the original assistant message to the chat messages; this is required for the service
// to understand the tool call responses. Also add the result message to the caller's chat
// history: if they don't want it, they can remove it, but this makes the data available,
// including metadata like usage.
- chatOptions.Messages.Add(GetRequestMessage(resultChoice.Message));
- chat.Add(result);
+ chatMessages.Add(GetRequestMessage(responseData));
+ chat.Add(responseContent);
// We must send back a response for every tool call, regardless of whether we successfully executed it or not.
// If we successfully execute it, we'll add the result. If we don't, we'll add an error.
- for (int toolCallIndex = 0; toolCallIndex < result.ToolCalls.Count; toolCallIndex++)
+ for (int toolCallIndex = 0; toolCallIndex < responseContent.ToolCalls.Count; toolCallIndex++)
{
- ChatCompletionsToolCall toolCall = result.ToolCalls[toolCallIndex];
+ ChatToolCall functionToolCall = responseContent.ToolCalls[toolCallIndex];
// We currently only know about function tool calls. If it's anything else, we'll respond with an error.
- if (toolCall is not ChatCompletionsFunctionToolCall functionToolCall)
+ if (functionToolCall.Kind != ChatToolCallKind.Function)
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Tool call was not a function call.", functionToolCall, this.Logger);
continue;
}
// Parse the function call arguments.
- AzureOpenAIFunctionToolCall? openAIFunctionToolCall;
+ AzureOpenAIFunctionToolCall? azureOpenAIFunctionToolCall;
try
{
- openAIFunctionToolCall = new(functionToolCall);
+ azureOpenAIFunctionToolCall = new(functionToolCall);
}
catch (JsonException)
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Function call arguments were invalid JSON.", functionToolCall, this.Logger);
continue;
}
@@ -491,16 +365,16 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
// then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able
// to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check.
if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true &&
- !IsRequestableTool(chatOptions, openAIFunctionToolCall))
+ !IsRequestableTool(chatOptions, azureOpenAIFunctionToolCall))
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Function call request for a function that wasn't defined.", functionToolCall, this.Logger);
continue;
}
// Find the function in the kernel and populate the arguments.
- if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs))
+ if (!kernel!.Plugins.TryGetFunctionAndArguments(azureOpenAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs))
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Requested function could not be found.", functionToolCall, this.Logger);
continue;
}
@@ -509,9 +383,9 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat)
{
Arguments = functionArgs,
- RequestSequenceIndex = requestIndex - 1,
+ RequestSequenceIndex = requestIndex,
FunctionSequenceIndex = toolCallIndex,
- FunctionCount = result.ToolCalls.Count
+ FunctionCount = responseContent.ToolCalls.Count
};
s_inflightAutoInvokes.Value++;
@@ -535,7 +409,7 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
catch (Exception e)
#pragma warning restore CA1031 // Do not catch general exception types
{
- AddResponseMessage(chatOptions, chat, null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, null, $"Error: Exception while invoking function. {e.Message}", functionToolCall, this.Logger);
continue;
}
finally
@@ -549,7 +423,7 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
object functionResultValue = functionResult.GetValue() ?? string.Empty;
var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior);
- AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, functionToolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, stringResult, errorMessage: null, functionToolCall, this.Logger);
// If filter requested termination, returning latest function result.
if (invocationContext.Terminate)
@@ -562,46 +436,6 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
return [chat.Last()];
}
}
-
- // Update tool use information for the next go-around based on having completed another iteration.
- Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null);
-
- // Set the tool choice to none. If we end up wanting to use tools, we'll reset it to the desired value.
- chatOptions.ToolChoice = ChatCompletionsToolChoice.None;
- chatOptions.Tools.Clear();
-
- if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts)
- {
- // Don't add any tools as we've reached the maximum attempts limit.
- if (this.Logger.IsEnabled(LogLevel.Debug))
- {
- this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts);
- }
- }
- else
- {
- // Regenerate the tool list as necessary. The invocation of the function(s) could have augmented
- // what functions are available in the kernel.
- chatExecutionSettings.ToolCallBehavior.ConfigureOptions(kernel, chatOptions);
- }
-
- // Having already sent tools and with tool call information in history, the service can become unhappy ("[] is too short - 'tools'")
- // if we don't send any tools in subsequent requests, even if we say not to use any.
- if (chatOptions.ToolChoice == ChatCompletionsToolChoice.None)
- {
- Debug.Assert(chatOptions.Tools.Count == 0);
- chatOptions.Tools.Add(s_nonInvocableFunctionTool);
- }
-
- // Disable auto invocation if we've exceeded the allowed limit.
- if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts)
- {
- autoInvoke = false;
- if (this.Logger.IsEnabled(LogLevel.Debug))
- {
- this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts);
- }
- }
}
}
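The per-iteration tool bookkeeping removed above is consolidated into GetToolCallingConfiguration, introduced elsewhere in this patch. A minimal sketch of what such a helper might look like under the same MaximumUseAttempts and MaximumAutoInvokeAttempts rules; the GetTools helper is hypothetical:

    private ToolCallingConfig GetToolCallingConfiguration(Kernel? kernel, AzureOpenAIPromptExecutionSettings settings, int requestIndex)
    {
        // Past the use limit, stop offering real tools (the removed code swapped in s_nonInvocableFunctionTool here).
        if (settings.ToolCallBehavior is null || requestIndex >= settings.ToolCallBehavior.MaximumUseAttempts)
        {
            return new ToolCallingConfig(Tools: [s_nonInvocableFunctionTool], Choice: ChatToolChoice.None, AutoInvoke: false);
        }

        // Auto-invoke only while under both the per-request and the in-flight limits.
        bool autoInvoke = kernel is not null
            && requestIndex < settings.ToolCallBehavior.MaximumAutoInvokeAttempts
            && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes;

        return new ToolCallingConfig(
            Tools: GetTools(settings.ToolCallBehavior, kernel), // hypothetical helper building IList<ChatTool>
            Choice: ChatToolChoice.Auto,
            AutoInvoke: autoInvoke);
    }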
@@ -613,22 +447,30 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
{
Verify.NotNull(chat);
+ if (this.Logger.IsEnabled(LogLevel.Trace))
+ {
+ this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}",
+ JsonSerializer.Serialize(chat),
+ JsonSerializer.Serialize(executionSettings));
+ }
+
AzureOpenAIPromptExecutionSettings chatExecutionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
ValidateMaxTokens(chatExecutionSettings.MaxTokens);
- bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes;
- ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt);
-
- var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName);
-
StringBuilder? contentBuilder = null;
Dictionary<int, string>? toolCallIdsByIndex = null;
Dictionary<int, string>? functionNamesByIndex = null;
Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex = null;
- for (int requestIndex = 1; ; requestIndex++)
+ var chatMessages = CreateChatCompletionMessages(chatExecutionSettings, chat);
+
+ for (int requestIndex = 0; ; requestIndex++)
{
+ var toolCallingConfig = this.GetToolCallingConfiguration(kernel, chatExecutionSettings, requestIndex);
+
+ var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, toolCallingConfig, kernel);
+
// Reset state
contentBuilder?.Clear();
toolCallIdsByIndex?.Clear();
@@ -638,18 +480,18 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
// Stream the response.
IReadOnlyDictionary<string, object?>? metadata = null;
string? streamedName = null;
- ChatRole? streamedRole = default;
- CompletionsFinishReason finishReason = default;
- ChatCompletionsFunctionToolCall[]? toolCalls = null;
+ ChatMessageRole? streamedRole = default;
+ ChatFinishReason finishReason = default;
+ ChatToolCall[]? toolCalls = null;
FunctionCallContent[]? functionCallContents = null;
using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings))
{
// Make the request.
- StreamingResponse<StreamingChatCompletionsUpdate> response;
+ AsyncResultCollection<StreamingChatCompletionUpdate> response;
try
{
- response = await RunRequestAsync(() => this.Client.GetChatCompletionsStreamingAsync(chatOptions, cancellationToken)).ConfigureAwait(false);
+ response = RunRequest(() => this.Client.GetChatClient(this.DeploymentOrModelName).CompleteChatStreamingAsync(chatMessages, chatOptions, cancellationToken));
}
catch (Exception ex) when (activity is not null)
{
@@ -676,32 +518,44 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
throw;
}
- StreamingChatCompletionsUpdate update = responseEnumerator.Current;
+ StreamingChatCompletionUpdate update = responseEnumerator.Current;
metadata = GetResponseMetadata(update);
streamedRole ??= update.Role;
- streamedName ??= update.AuthorName;
+ //streamedName ??= update.AuthorName; // AuthorName is not exposed on StreamingChatCompletionUpdate in the new SDK.
finishReason = update.FinishReason ?? default;
// If we're intending to invoke function calls, we need to consume that function call information.
- if (autoInvoke)
+ if (toolCallingConfig.AutoInvoke)
{
- if (update.ContentUpdate is { Length: > 0 } contentUpdate)
+ foreach (var contentPart in update.ContentUpdate)
{
- (contentBuilder ??= new()).Append(contentUpdate);
+ if (contentPart.Kind == ChatMessageContentPartKind.Text)
+ {
+ (contentBuilder ??= new()).Append(contentPart.Text);
+ }
}
- AzureOpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex);
+ AzureOpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdates, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex);
}
- var openAIStreamingChatMessageContent = new AzureOpenAIStreamingChatMessageContent(update, update.ChoiceIndex ?? 0, this.DeploymentOrModelName, metadata) { AuthorName = streamedName };
+ var openAIStreamingChatMessageContent = new AzureOpenAIStreamingChatMessageContent(update, 0, this.DeploymentOrModelName, metadata);
- if (update.ToolCallUpdate is StreamingFunctionToolCallUpdate functionCallUpdate)
+ foreach (var functionCallUpdate in update.ToolCallUpdates)
{
+ // Using the checks below to distinguish and skip updates that are not related to function calls.
+ // The Kind property of updates can't be reliably used because it's only initialized for the first update.
+ if (string.IsNullOrEmpty(functionCallUpdate.Id) &&
+ string.IsNullOrEmpty(functionCallUpdate.FunctionName) &&
+ string.IsNullOrEmpty(functionCallUpdate.FunctionArgumentsUpdate))
+ {
+ continue;
+ }
+
openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent(
callId: functionCallUpdate.Id,
- name: functionCallUpdate.Name,
- arguments: functionCallUpdate.ArgumentsUpdate,
- functionCallIndex: functionCallUpdate.ToolCallIndex));
+ name: functionCallUpdate.FunctionName,
+ arguments: functionCallUpdate.FunctionArgumentsUpdate,
+ functionCallIndex: functionCallUpdate.Index));
}
streamedContents?.Add(openAIStreamingChatMessageContent);
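For context, TrackStreamingToolingUpdate accumulates the fragments delivered across updates: only the first fragment of a call carries Id and FunctionName, and the argument JSON arrives piecewise. A hedged sketch of that accumulation pattern with illustrative names:

    // Argument text for each tool call arrives in pieces, keyed by the call's index.
    Dictionary<int, StringBuilder> argumentsByIndex = new();

    void Track(StreamingChatToolCallUpdate toolCallUpdate)
    {
        if (!argumentsByIndex.TryGetValue(toolCallUpdate.Index, out StringBuilder? builder))
        {
            argumentsByIndex[toolCallUpdate.Index] = builder = new StringBuilder();
        }

        builder.Append(toolCallUpdate.FunctionArgumentsUpdate);
    }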
@@ -726,7 +580,7 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
// Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service
// may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool
// is specified.
- if (!autoInvoke ||
+ if (!toolCallingConfig.AutoInvoke ||
toolCallIdsByIndex is not { Count: > 0 })
{
yield break;
@@ -738,27 +592,27 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
// Log the requests
if (this.Logger.IsEnabled(LogLevel.Trace))
{
- this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", toolCalls.Select(fcr => $"{fcr.Name}({fcr.Arguments})")));
+ this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", toolCalls.Select(fcr => $"{fcr.FunctionName}({fcr.FunctionArguments})")));
}
else if (this.Logger.IsEnabled(LogLevel.Debug))
{
this.Logger.LogDebug("Function call requests: {Requests}", toolCalls.Length);
}
- // Add the original assistant message to the chatOptions; this is required for the service
+ // Add the original assistant message to the chat messages; this is required for the service
// to understand the tool call responses.
- chatOptions.Messages.Add(GetRequestMessage(streamedRole ?? default, content, streamedName, toolCalls));
+ chatMessages.Add(GetRequestMessage(streamedRole ?? default, content, streamedName, toolCalls));
chat.Add(this.GetChatMessage(streamedRole ?? default, content, toolCalls, functionCallContents, metadata, streamedName));
// Respond to each tooling request.
for (int toolCallIndex = 0; toolCallIndex < toolCalls.Length; toolCallIndex++)
{
- ChatCompletionsFunctionToolCall toolCall = toolCalls[toolCallIndex];
+ ChatToolCall toolCall = toolCalls[toolCallIndex];
// We currently only know about function tool calls. If it's anything else, we'll respond with an error.
- if (string.IsNullOrEmpty(toolCall.Name))
+ if (string.IsNullOrEmpty(toolCall.FunctionName))
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger);
continue;
}
@@ -770,7 +624,7 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
}
catch (JsonException)
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger);
continue;
}
@@ -780,14 +634,14 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true &&
!IsRequestableTool(chatOptions, openAIFunctionToolCall))
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger);
continue;
}
// Find the function in the kernel and populate the arguments.
if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs))
{
- AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger);
continue;
}
@@ -796,7 +650,7 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat)
{
Arguments = functionArgs,
- RequestSequenceIndex = requestIndex - 1,
+ RequestSequenceIndex = requestIndex,
FunctionSequenceIndex = toolCallIndex,
FunctionCount = toolCalls.Length
};
@@ -822,7 +676,7 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
catch (Exception e)
#pragma warning restore CA1031 // Do not catch general exception types
{
- AddResponseMessage(chatOptions, chat, result: null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, result: null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger);
continue;
}
finally
@@ -836,7 +690,7 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
object functionResultValue = functionResult.GetValue() ?? string.Empty;
var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior);
- AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, toolCall, this.Logger);
+ AddResponseMessage(chatMessages, chat, stringResult, errorMessage: null, toolCall, this.Logger);
// If filter requested termination, returning latest function result and breaking request iteration loop.
if (invocationContext.Terminate)
@@ -852,57 +706,17 @@ internal async IAsyncEnumerable<StreamingChatMessageContent> GetStrea
yield break;
}
}
-
- // Update tool use information for the next go-around based on having completed another iteration.
- Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null);
-
- // Set the tool choice to none. If we end up wanting to use tools, we'll reset it to the desired value.
- chatOptions.ToolChoice = ChatCompletionsToolChoice.None;
- chatOptions.Tools.Clear();
-
- if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts)
- {
- // Don't add any tools as we've reached the maximum attempts limit.
- if (this.Logger.IsEnabled(LogLevel.Debug))
- {
- this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts);
- }
- }
- else
- {
- // Regenerate the tool list as necessary. The invocation of the function(s) could have augmented
- // what functions are available in the kernel.
- chatExecutionSettings.ToolCallBehavior.ConfigureOptions(kernel, chatOptions);
- }
-
- // Having already sent tools and with tool call information in history, the service can become unhappy ("[] is too short - 'tools'")
- // if we don't send any tools in subsequent requests, even if we say not to use any.
- if (chatOptions.ToolChoice == ChatCompletionsToolChoice.None)
- {
- Debug.Assert(chatOptions.Tools.Count == 0);
- chatOptions.Tools.Add(s_nonInvocableFunctionTool);
- }
-
- // Disable auto invocation if we've exceeded the allowed limit.
- if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts)
- {
- autoInvoke = false;
- if (this.Logger.IsEnabled(LogLevel.Debug))
- {
- this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts);
- }
- }
}
}
/// Checks if a tool call is for a function that was defined.
- private static bool IsRequestableTool(ChatCompletionsOptions options, AzureOpenAIFunctionToolCall ftc)
+ private static bool IsRequestableTool(ChatCompletionOptions options, AzureOpenAIFunctionToolCall ftc)
{
- IList<ChatCompletionsToolDefinition> tools = options.Tools;
+ IList<ChatTool> tools = options.Tools;
for (int i = 0; i < tools.Count; i++)
{
- if (tools[i] is ChatCompletionsFunctionToolDefinition def &&
- string.Equals(def.Name, ftc.FullyQualifiedName, StringComparison.OrdinalIgnoreCase))
+ if (tools[i].Kind == ChatToolKind.Function &&
+ string.Equals(tools[i].FunctionName, ftc.FullyQualifiedName, StringComparison.OrdinalIgnoreCase))
{
return true;
}
@@ -950,22 +764,21 @@ internal void AddAttribute(string key, string? value)
/// <summary>Gets options to use for an OpenAIClient</summary>
/// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
- /// <param name="serviceVersion">Optional API version.</param>
/// <returns>An instance of <see cref="OpenAIClientOptions"/>.</returns>
- internal static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient, OpenAIClientOptions.ServiceVersion? serviceVersion = null)
+ internal static AzureOpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient)
{
- OpenAIClientOptions options = serviceVersion is not null ?
- new(serviceVersion.Value) :
- new();
+ AzureOpenAIClientOptions options = new()
+ {
+ ApplicationId = HttpHeaderConstant.Values.UserAgent,
+ };
- options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent;
- options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), HttpPipelinePosition.PerCall);
+ options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), PipelinePosition.PerCall);
if (httpClient is not null)
{
- options.Transport = new HttpClientTransport(httpClient);
- options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided.
- options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout
+ options.Transport = new HttpClientPipelineTransport(httpClient);
+ options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided.
+ options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout
}
return options;
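A short usage sketch, assuming a caller-supplied HttpClient and placeholder endpoint and key; AzureOpenAIClient and ApiKeyCredential are the types this patch migrates to:

    using System.ClientModel;
    using Azure.AI.OpenAI;

    using HttpClient httpClient = new() { Timeout = TimeSpan.FromSeconds(120) };
    AzureOpenAIClientOptions options = GetOpenAIClientOptions(httpClient); // SDK retries and timeout disabled above
    AzureOpenAIClient client = new(
        new Uri("https://my-resource.openai.azure.com"), // placeholder endpoint
        new ApiKeyCredential("api-key"),                 // placeholder credential
        options);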
@@ -998,129 +811,44 @@ private static ChatHistory CreateNewChat(string? text = null, AzureOpenAIPromptE
return chat;
}
- private static CompletionsOptions CreateCompletionsOptions(string text, AzureOpenAIPromptExecutionSettings executionSettings, string deploymentOrModelName)
- {
- if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt)
- {
- throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive.");
- }
-
- var options = new CompletionsOptions
- {
- Prompts = { text.Replace("\r\n", "\n") }, // normalize line endings
- MaxTokens = executionSettings.MaxTokens,
- Temperature = (float?)executionSettings.Temperature,
- NucleusSamplingFactor = (float?)executionSettings.TopP,
- FrequencyPenalty = (float?)executionSettings.FrequencyPenalty,
- PresencePenalty = (float?)executionSettings.PresencePenalty,
- Echo = false,
- ChoicesPerPrompt = executionSettings.ResultsPerPrompt,
- GenerationSampleCount = executionSettings.ResultsPerPrompt,
- LogProbabilityCount = executionSettings.TopLogprobs,
- User = executionSettings.User,
- DeploymentName = deploymentOrModelName
- };
-
- if (executionSettings.TokenSelectionBiases is not null)
- {
- foreach (var keyValue in executionSettings.TokenSelectionBiases)
- {
- options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value);
- }
- }
-
- if (executionSettings.StopSequences is { Count: > 0 })
- {
- foreach (var s in executionSettings.StopSequences)
- {
- options.StopSequences.Add(s);
- }
- }
-
- return options;
- }
-
- private ChatCompletionsOptions CreateChatCompletionsOptions(
+ private ChatCompletionOptions CreateChatCompletionsOptions(
AzureOpenAIPromptExecutionSettings executionSettings,
ChatHistory chatHistory,
- Kernel? kernel,
- string deploymentOrModelName)
+ ToolCallingConfig toolCallingConfig,
+ Kernel? kernel)
{
- if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt)
- {
- throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive.");
- }
-
- if (this.Logger.IsEnabled(LogLevel.Trace))
- {
- this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}",
- JsonSerializer.Serialize(chatHistory),
- JsonSerializer.Serialize(executionSettings));
- }
-
- var options = new ChatCompletionsOptions
+ var options = new ChatCompletionOptions
{
MaxTokens = executionSettings.MaxTokens,
Temperature = (float?)executionSettings.Temperature,
- NucleusSamplingFactor = (float?)executionSettings.TopP,
+ TopP = (float?)executionSettings.TopP,
FrequencyPenalty = (float?)executionSettings.FrequencyPenalty,
PresencePenalty = (float?)executionSettings.PresencePenalty,
- ChoiceCount = executionSettings.ResultsPerPrompt,
- DeploymentName = deploymentOrModelName,
Seed = executionSettings.Seed,
User = executionSettings.User,
- LogProbabilitiesPerToken = executionSettings.TopLogprobs,
- EnableLogProbabilities = executionSettings.Logprobs,
- AzureExtensionsOptions = executionSettings.AzureChatExtensionsOptions
+ TopLogProbabilityCount = executionSettings.TopLogprobs,
+ IncludeLogProbabilities = executionSettings.Logprobs,
+ ResponseFormat = GetResponseFormat(executionSettings) ?? ChatResponseFormat.Text,
+ ToolChoice = toolCallingConfig.Choice,
};
- switch (executionSettings.ResponseFormat)
+ if (executionSettings.AzureChatDataSource is not null)
{
- case ChatCompletionsResponseFormat formatObject:
- // If the response format is an Azure SDK ChatCompletionsResponseFormat, just pass it along.
- options.ResponseFormat = formatObject;
- break;
-
- case string formatString:
- // If the response format is a string, map the ones we know about, and ignore the rest.
- switch (formatString)
- {
- case "json_object":
- options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject;
- break;
-
- case "text":
- options.ResponseFormat = ChatCompletionsResponseFormat.Text;
- break;
- }
- break;
-
- case JsonElement formatElement:
- // This is a workaround for a type mismatch when deserializing a JSON into an object? type property.
- // Handling only string formatElement.
- if (formatElement.ValueKind == JsonValueKind.String)
- {
- string formatString = formatElement.GetString() ?? "";
- switch (formatString)
- {
- case "json_object":
- options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject;
- break;
+#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ options.AddDataSource(executionSettings.AzureChatDataSource);
+#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ }
- case "text":
- options.ResponseFormat = ChatCompletionsResponseFormat.Text;
- break;
- }
- }
- break;
+ if (toolCallingConfig.Tools is { Count: > 0 } tools)
+ {
+ options.Tools.AddRange(tools);
}
- executionSettings.ToolCallBehavior?.ConfigureOptions(kernel, options);
if (executionSettings.TokenSelectionBiases is not null)
{
foreach (var keyValue in executionSettings.TokenSelectionBiases)
{
- options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value);
+ options.LogitBiases.Add(keyValue.Key, keyValue.Value);
}
}
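To make the renamed knobs concrete, a hedged mapping from the old ChatCompletionsOptions property names to the new ChatCompletionOptions ones; all values are placeholders:

    var options = new ChatCompletionOptions
    {
        MaxTokens = 256,
        Temperature = 0.7f,
        TopP = 0.95f,                   // was NucleusSamplingFactor
        IncludeLogProbabilities = true, // was EnableLogProbabilities
        TopLogProbabilityCount = 3,     // was LogProbabilitiesPerToken
    };
    options.LogitBiases.Add(50256, -100); // was TokenSelectionBiases; the token id is a placeholder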
@@ -1132,52 +860,51 @@ private ChatCompletionsOptions CreateChatCompletionsOptions(
}
}
+ return options;
+ }
+
+ private static List<ChatMessage> CreateChatCompletionMessages(AzureOpenAIPromptExecutionSettings executionSettings, ChatHistory chatHistory)
+ {
+ List<ChatMessage> messages = [];
+
if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System))
{
- options.Messages.AddRange(GetRequestMessages(new ChatMessageContent(AuthorRole.System, executionSettings!.ChatSystemPrompt), executionSettings.ToolCallBehavior));
+ messages.Add(new SystemChatMessage(executionSettings.ChatSystemPrompt));
}
foreach (var message in chatHistory)
{
- options.Messages.AddRange(GetRequestMessages(message, executionSettings.ToolCallBehavior));
+ messages.AddRange(GetRequestMessages(message, executionSettings.ToolCallBehavior));
}
- return options;
+ return messages;
}
- private static ChatRequestMessage GetRequestMessage(ChatRole chatRole, string contents, string? name, ChatCompletionsFunctionToolCall[]? tools)
+ private static ChatMessage GetRequestMessage(ChatMessageRole chatRole, string content, string? name, ChatToolCall[]? tools)
{
- if (chatRole == ChatRole.User)
+ if (chatRole == ChatMessageRole.User)
{
- return new ChatRequestUserMessage(contents) { Name = name };
+ return new UserChatMessage(content) { ParticipantName = name };
}
- if (chatRole == ChatRole.System)
+ if (chatRole == ChatMessageRole.System)
{
- return new ChatRequestSystemMessage(contents) { Name = name };
+ return new SystemChatMessage(content) { ParticipantName = name };
}
- if (chatRole == ChatRole.Assistant)
+ if (chatRole == ChatMessageRole.Assistant)
{
- var msg = new ChatRequestAssistantMessage(contents) { Name = name };
- if (tools is not null)
- {
- foreach (ChatCompletionsFunctionToolCall tool in tools)
- {
- msg.ToolCalls.Add(tool);
- }
- }
- return msg;
+ return new AssistantChatMessage(tools, content) { ParticipantName = name };
}
throw new NotImplementedException($"Role {chatRole} is not implemented");
}
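A brief usage sketch of the role mapping above; the contents and participant name are placeholders, and previousToolCalls stands in for calls captured from an earlier assistant turn:

    ChatToolCall[] previousToolCalls = []; // placeholder for calls collected from a prior response
    ChatMessage system = GetRequestMessage(ChatMessageRole.System, "You are a helpful assistant.", name: null, tools: null);
    ChatMessage user = GetRequestMessage(ChatMessageRole.User, "What's the weather in Paris?", name: "alice", tools: null);
    ChatMessage assistant = GetRequestMessage(ChatMessageRole.Assistant, "Checking the weather.", name: null, tools: previousToolCalls);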
- private static List