From 7caaeb383c542f02504093c5df47e369a1cb08c2 Mon Sep 17 00:00:00 2001
From: Takuya Ono
Date: Mon, 29 May 2023 22:30:02 +0900
Subject: [PATCH] =?UTF-8?q?=20=E2=9C=A8=20Feature(src/openai-thread-comple?=
 =?UTF-8?q?tion.js):=20add=20support=20for=20Azure=20OpenAI=20Service?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

If setting Azure OpenAI API key instead of OpenAI API key, it uses Azure
OpenAI Service.
The instance name, deployment name and API version are now configurable
via environment variables. The API request base path and headers are set
up accordingly to use the Azure OpenAI Service API.
---
 README.md                       | 16 ++++++++++++++++
 src/openai-thread-completion.js |  8 ++++++++
 2 files changed, 24 insertions(+)

diff --git a/README.md b/README.md
index 5cb4902..f07b20a 100644
--- a/README.md
+++ b/README.md
@@ -27,6 +27,10 @@ or when [running the docker image](#using-the-ready-made-image) or when configur
 | OPENAI_MODEL_NAME | no | `gpt-3.5-turbo` | The OpenAI language model to use, defaults to `gpt-3.5-turbo` |
 | OPENAI_MAX_TOKENS | no | `2000` | The maximum number of tokens to pass to the OpenAI API, defaults to 2000 |
 | OPENAI_TEMPERATURE | no | `0.2` | The sampling temperature to use, between 0 and 2, defaults to 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. |
+| AZURE_OPENAI_API_KEY | no | `0123456789abcdefghijklmno` | The Azure OpenAI Service API key to authenticate |
+| AZURE_OPENAI_API_INSTANCE_NAME | no | `example-name` | The instance name on the Azure OpenAI Service |
+| AZURE_OPENAI_API_DEPLOYMENT_NAME | no | `gpt-35-turbo` | The name of the deployed model on the Azure OpenAI Service |
+| AZURE_OPENAI_API_VERSION | no | `2023-03-15-preview` | The Azure OpenAI version |
 | YFILES_SERVER_URL | no | `http://localhost:3835` | The URL to the yFiles graph service for embedding auto-generated diagrams. |
 | NODE_EXTRA_CA_CERTS | no | `/file/to/cert.crt` | a link to a certificate file to pass to node.js for authenticating self-signed certificates |
 | MATTERMOST_BOTNAME | no | `"@chatgpt"` | the name of the bot user in Mattermost, defaults to '@chatgpt' |
@@ -51,6 +55,18 @@ docker run -d --restart unless-stopped \
   ghcr.io/yguy/chatgpt-mattermost-bot:latest
 ```
 
+When using the Azure OpenAI Service:
+```bash
+docker run -d --restart unless-stopped \
+  -e MATTERMOST_URL=https://mattermost.server \
+  -e MATTERMOST_TOKEN=abababacdcdcd \
+  -e AZURE_OPENAI_API_KEY=234234234234234234 \
+  -e AZURE_OPENAI_API_INSTANCE_NAME=example-name \
+  --name chatbot \
+  ghcr.io/yguy/chatgpt-mattermost-bot:latest
+```
+
+
 ## Building the docker image yourself
 
 First step is to clone this repo.
diff --git a/src/openai-thread-completion.js b/src/openai-thread-completion.js
index 0660f2d..99c155d 100644
--- a/src/openai-thread-completion.js
+++ b/src/openai-thread-completion.js
@@ -2,6 +2,14 @@ const { Configuration, OpenAIApi } = require("openai");
 const configuration = new Configuration({
     apiKey: process.env["OPENAI_API_KEY"]
 });
+const azureOpenAiApiKey = process.env["AZURE_OPENAI_API_KEY"]
+if ( azureOpenAiApiKey ) {
+  configuration.baseOptions = {
+    headers: { 'api-key': azureOpenAiApiKey },
+    params: { 'api-version': process.env["AZURE_OPENAI_API_VERSION"] ?? '2023-03-15-preview' }
+  };
+  configuration.basePath = 'https://' + process.env["AZURE_OPENAI_API_INSTANCE_NAME"] + '.openai.azure.com/openai/deployments/' + (process.env["AZURE_OPENAI_API_DEPLOYMENT_NAME"] ?? 'gpt-35-turbo');
+}
 const openai = new OpenAIApi(configuration);
 
 const model = process.env["OPENAI_MODEL_NAME"] ?? 'gpt-3.5-turbo'