diff --git a/conversation/csharp/http/README.md b/conversation/csharp/http/README.md
new file mode 100644
index 000000000..b1d8a3144
--- /dev/null
+++ b/conversation/csharp/http/README.md
@@ -0,0 +1,85 @@
+# Dapr Conversation API (C# HTTP)
+
+In this quickstart, you'll send an input to a mock Large Language Model (LLM) using Dapr's Conversation API, which provides a single, consistent entry point for talking to underlying LLM providers.
+
+Visit the [Conversation API overview](https://v1-15.docs.dapr.io/developing-applications/building-blocks/conversation/conversation-overview/) for more information about Dapr and the Conversation API.
+
+> **Note:** This example uses plain HTTP requests only. If you are looking for the example using the Dapr Client SDK (recommended), [click here](../sdk/).
+
+This quickstart includes one app:
+
+- `conversation`, responsible for sending an input to the underlying LLM and retrieving an output.
+
+## Run the app with the template file
+
+This section shows how to run the application using the [multi-app run template file](https://docs.dapr.io/developing-applications/local-development/multi-app-dapr-run/multi-app-overview/) and the Dapr CLI with `dapr run -f .`.
+
+This example uses the default LLM component provided by Dapr, which simply echoes the input back, for testing purposes. Integrate with popular LLM providers by using one of the other [supported conversation components](https://v1-15.docs.dapr.io/reference/components-reference/supported-conversation/).
+
+1. Open a new terminal window and run the multi-app run template:
+
+```bash
+dapr run -f .
+```
+
+The terminal console output should look similar to this, where:
+
+- The app sends the input `What is dapr?` to the `echo` mock LLM component.
+- The mock LLM echoes `What is dapr?` back.
+
+```text
+== APP - conversation == Input sent: What is dapr?
+== APP - conversation == Output response: What is dapr?
+```
+
+2. Stop and clean up the application processes:
+
+```bash
+dapr stop -f .
+```
+
+## Run the app individually
+
+1. Open a terminal, navigate to the `conversation` app, and build its dependencies if you haven't already:
+
+```bash
+cd ./conversation
+dotnet build
+```
+
+2. Run the Dapr process alongside the application:
+
+```bash
+dapr run --app-id conversation --resources-path ../../../components/ -- dotnet run
+```
+
+The terminal console output should look similar to this, where:
+
+- The app sends the input `What is dapr?` to the `echo` mock LLM component.
+- The mock LLM echoes `What is dapr?` back.
+
+```text
+== APP - conversation == Input sent: What is dapr?
+== APP - conversation == Output response: What is dapr?
+```
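+
+Under the hood, the app makes a single HTTP POST to the Dapr sidecar's `v1.0-alpha1` conversation endpoint (see `conversation/Program.cs`). As a rough equivalent, you could exercise the same endpoint with `curl` while the sidecar is running (a sketch assuming the default HTTP port `3500` configured in `dapr.yaml`):
+
+```bash
+# POST one input to the echo conversation component via the Dapr sidecar
+curl -X POST http://localhost:3500/v1.0-alpha1/conversation/echo/converse \
+  -H "Content-Type: application/json" \
+  -d '{"name": "echo", "inputs": [{"message": "What is dapr?"}], "parameters": {}, "metadata": {}}'
+```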
diff --git a/conversation/csharp/http/conversation/Program.cs b/conversation/csharp/http/conversation/Program.cs
new file mode 100644
index 000000000..4ea1cbaf2
--- /dev/null
+++ b/conversation/csharp/http/conversation/Program.cs
@@ -0,0 +1,67 @@
+/*
+Copyright 2024 The Dapr Authors
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+
+class Program
+{
+    private const string ConversationComponentName = "echo";
+
+    static async Task Main(string[] args)
+    {
+        var daprHost = Environment.GetEnvironmentVariable("DAPR_HOST") ?? "http://localhost";
+        var daprHttpPort = Environment.GetEnvironmentVariable("DAPR_HTTP_PORT") ?? "3500";
+
+        var client = new HttpClient
+        {
+            Timeout = TimeSpan.FromSeconds(15)
+        };
+
+        var inputBody = new
+        {
+            name = "echo",
+            inputs = new[] { new { message = "What is dapr?" } },
+            parameters = new { },
+            metadata = new { }
+        };
+
+        var daprUrl = $"{daprHost}:{daprHttpPort}/v1.0-alpha1/conversation/{ConversationComponentName}/converse";
+
+        try
+        {
+            var content = new StringContent(JsonSerializer.Serialize(inputBody), Encoding.UTF8, "application/json");
+
+            // Send a request to the echo mock LLM component
+            var response = await client.PostAsync(daprUrl, content);
+            response.EnsureSuccessStatusCode();
+
+            Console.WriteLine("Input sent: " + inputBody.inputs[0].message);
+
+            var responseBody = await response.Content.ReadAsStringAsync();
+
+            // Parse the response
+            var data = JsonSerializer.Deserialize<Dictionary<string, List<Dictionary<string, string>>>>(responseBody);
+            var result = data?["outputs"]?[0]?["result"];
+
+            Console.WriteLine("Output response: " + result);
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine("Error: " + ex.Message);
+        }
+    }
+}
diff --git a/conversation/csharp/http/conversation/Program.csproj b/conversation/csharp/http/conversation/Program.csproj
new file mode 100644
index 000000000..47cf3be7e
--- /dev/null
+++ b/conversation/csharp/http/conversation/Program.csproj
@@ -0,0 +1,14 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+</Project>
diff --git a/conversation/csharp/http/dapr.yaml b/conversation/csharp/http/dapr.yaml
new file mode 100644
index 000000000..d9d9b265a
--- /dev/null
+++ b/conversation/csharp/http/dapr.yaml
@@ -0,0 +1,8 @@
+version: 1
+common:
+  resourcesPath: ../../components/
+apps:
+  - appDirPath: ./conversation/
+    appID: conversation
+    daprHTTPPort: 3500
+    command: ["dotnet", "run"]
\ No newline at end of file
diff --git a/conversation/csharp/http/makefile b/conversation/csharp/http/makefile
new file mode 100644
index 000000000..e7a8826bf
--- /dev/null
+++ b/conversation/csharp/http/makefile
@@ -0,0 +1,2 @@
+include ../../../docker.mk
+include ../../../validate.mk
\ No newline at end of file
diff --git a/conversation/csharp/sdk/README.md b/conversation/csharp/sdk/README.md
new file mode 100644
index 000000000..6a5026f2a
--- /dev/null
+++ b/conversation/csharp/sdk/README.md
@@ -0,0 +1,85 @@
+# Dapr Conversation API (C# SDK)
+
+In this quickstart, you'll send an input to a mock Large Language Model (LLM) using Dapr's Conversation API, which provides a single, consistent entry point for talking to underlying LLM providers.
+
+Visit the [Conversation API overview](https://v1-15.docs.dapr.io/developing-applications/building-blocks/conversation/conversation-overview/) for more information about Dapr and the Conversation API.
+
+> **Note:** This example leverages the Dapr Client SDK. If you are looking for the example using the HTTP API, [click here](../http/).
+
+This quickstart includes one app:
+
+- `conversation`, responsible for sending an input to the underlying LLM and retrieving an output (see the sketch below).
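+
+At its core, the app registers the Dapr conversation client and makes a single `ConverseAsync` call. The snippet below is a trimmed sketch of the app's `conversation/Program.cs` (error handling omitted):
+
+```csharp
+using Dapr.AI.Conversation;
+using Dapr.AI.Conversation.Extensions;
+
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddDaprConversationClient();
+var app = builder.Build();
+
+// Resolve the conversation client and send one prompt to the "echo" component
+var client = app.Services.GetRequiredService<DaprConversationClient>();
+var response = await client.ConverseAsync("echo", [new("What is dapr?", DaprConversationRole.Generic)]);
+
+foreach (var output in response.Outputs)
+{
+    Console.WriteLine(output.Result);
+}
+```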
+
+## Run the app with the template file
+
+This section shows how to run the application using the [multi-app run template file](https://docs.dapr.io/developing-applications/local-development/multi-app-dapr-run/multi-app-overview/) and the Dapr CLI with `dapr run -f .`.
+
+This example uses the default LLM component provided by Dapr, which simply echoes the input back, for testing purposes. Integrate with popular LLM providers by using one of the other [supported conversation components](https://v1-15.docs.dapr.io/reference/components-reference/supported-conversation/).
+
+1. Open a new terminal window and run the multi-app run template:
+
+```bash
+dapr run -f .
+```
+
+The terminal console output should look similar to this, where:
+
+- The app sends the input `What is dapr?` to the `echo` mock LLM component.
+- The mock LLM echoes `What is dapr?` back.
+
+```text
+== APP - conversation == Input sent: What is dapr?
+== APP - conversation == Output response: What is dapr?
+```
+
+2. Stop and clean up the application processes:
+
+```bash
+dapr stop -f .
+```
+
+## Run the app individually
+
+1. Open a terminal, navigate to the `conversation` app, and build its dependencies if you haven't already:
+
+```bash
+cd ./conversation
+dotnet build
+```
+
+2. Run the Dapr process alongside the application:
+
+```bash
+dapr run --app-id conversation --resources-path ../../../components/ -- dotnet run
+```
+
+The terminal console output should look similar to this, where:
+
+- The app sends the input `What is dapr?` to the `echo` mock LLM component.
+- The mock LLM echoes `What is dapr?` back.
+
+```text
+== APP - conversation == Input sent: What is dapr?
+== APP - conversation == Output response: What is dapr?
+```
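+
+Both run modes load the `echo` component definition from the shared `components` folder referenced above (`resourcesPath` in `dapr.yaml`, or `--resources-path` on the command line). As a rough sketch, a conversation component definition for the mock echo LLM looks like this (the exact file shipped with this repo may differ):
+
+```yaml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+  name: echo
+spec:
+  type: conversation.echo
+  version: v1
+```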
diff --git a/conversation/csharp/sdk/conversation/Program.cs b/conversation/csharp/sdk/conversation/Program.cs
new file mode 100644
index 000000000..c5562381a
--- /dev/null
+++ b/conversation/csharp/sdk/conversation/Program.cs
@@ -0,0 +1,54 @@
+/*
+Copyright 2024 The Dapr Authors
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+using Dapr.AI.Conversation;
+using Dapr.AI.Conversation.Extensions;
+
+class Program
+{
+    private const string ConversationComponentName = "echo";
+
+    static async Task Main(string[] args)
+    {
+        const string prompt = "What is dapr?";
+
+        var builder = WebApplication.CreateBuilder(args);
+        builder.Services.AddDaprConversationClient();
+        var app = builder.Build();
+
+        // Instantiate the Dapr conversation client
+        var conversationClient = app.Services.GetRequiredService<DaprConversationClient>();
+
+        try
+        {
+            // Send a request to the echo mock LLM component
+            var response = await conversationClient.ConverseAsync(ConversationComponentName, [new(prompt, DaprConversationRole.Generic)]);
+            Console.WriteLine("Input sent: " + prompt);
+
+            if (response != null)
+            {
+                Console.Write("Output response:");
+                foreach (var resp in response.Outputs)
+                {
+                    Console.WriteLine($" {resp.Result}");
+                }
+            }
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine("Error: " + ex.Message);
+        }
+    }
+}
diff --git a/conversation/csharp/sdk/conversation/Program.csproj b/conversation/csharp/sdk/conversation/Program.csproj
new file mode 100644
index 000000000..c641f3a3f
--- /dev/null
+++ b/conversation/csharp/sdk/conversation/Program.csproj
@@ -0,0 +1,14 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+</Project>
diff --git a/conversation/csharp/sdk/dapr.yaml b/conversation/csharp/sdk/dapr.yaml
new file mode 100644
index 000000000..d9d9b265a
--- /dev/null
+++ b/conversation/csharp/sdk/dapr.yaml
@@ -0,0 +1,8 @@
+version: 1
+common:
+  resourcesPath: ../../components/
+apps:
+  - appDirPath: ./conversation/
+    appID: conversation
+    daprHTTPPort: 3500
+    command: ["dotnet", "run"]
\ No newline at end of file
diff --git a/conversation/csharp/sdk/makefile b/conversation/csharp/sdk/makefile
new file mode 100644
index 000000000..e7a8826bf
--- /dev/null
+++ b/conversation/csharp/sdk/makefile
@@ -0,0 +1,2 @@
+include ../../../docker.mk
+include ../../../validate.mk
\ No newline at end of file
diff --git a/conversation/go/http/README.md b/conversation/go/http/README.md
index faf34d743..36c81b23a 100644
--- a/conversation/go/http/README.md
+++ b/conversation/go/http/README.md
@@ -4,11 +4,11 @@
 In this quickstart, you'll send an input to a mock Large Language Model (LLM) using Dapr's Conversation API. This API is responsible for providing one consistent API entry point to talk to underlying LLM providers.
 
 Visit [this](https://v1-15.docs.dapr.io/developing-applications/building-blocks/conversation/conversation-overview/) link for more information about Dapr and the Conversation API.
 
-> **Note:** This example leverages HTTP `requests` only. If you are looking for the example using the Dapr Client SDK (recommended) [click here](../sdk/). 
+> **Note:** This example leverages HTTP `requests` only. If you are looking for the example using the Dapr Client SDK (recommended) [click here](../sdk/).
 
 This quickstart includes one app:
 
-- `conversation.go`, responsible for sending and input to the underlying LLM and retrieving an output.
+- `conversation.go`, responsible for sending an input to the underlying LLM and retrieving an output.
 
 ## Run the app with the template file
@@ -47,7 +47,7 @@ The terminal console output should look similar to this, where:
-2. Stop and clean up application processes
+2. Stop and clean up application processes.
-2. Stop and clean up application processes
+2. Stop and clean up application processes.