Skip to content

Commit

Permalink
feat: optimize read config file
Browse files Browse the repository at this point in the history
  • Loading branch information
stulzq committed Sep 19, 2023
1 parent 6a72aac commit e3cd102
Show file tree
Hide file tree
Showing 4 changed files with 122 additions and 13 deletions.
83 changes: 79 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Verified support projects:
| [chatgpt-web](https://github.com/Chanzhaoyu/chatgpt-web) ||
| [chatbox](https://github.com/Bin-Huang/chatbox) ||
| [langchain](https://python.langchain.com/en/latest/) ||
| [ChatGPT-Next-Web](https://github.com/Yidadaa/ChatGPT-Next-Web) ||

## Get Started

Expand All @@ -30,8 +31,8 @@ To successfully make a call against Azure OpenAI, you'll need the following:
| Name | Desc | Default |
| --------------------- | ------------------------------------------------------------ | ----------------------------- |
| AZURE_OPENAI_ENDPOINT | This value can be found in the **Keys & Endpoint** section when examining your resource from the Azure portal. Alternatively, you can find the value in **Azure OpenAI Studio** > **Playground** > **Code View**. An example endpoint is: `https://docs-test-001.openai.azure.com/`. | N |
| AZURE_OPENAI_API_VER | [See here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart?tabs=command-line&pivots=rest-api) or Azure OpenAI Studio | 2023-03-15-preview |
| AZURE_OPENAI_MODEL_MAPPER | This value will correspond to the custom name you chose for your deployment when you deployed a model. This value can be found under **Resource Management** > **Deployments** in the Azure portal or alternatively under **Management** > **Deployments** in Azure OpenAI Studio. | gpt-3.5-turbo=gpt-35-turbo |
| AZURE_OPENAI_API_VER | [See here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart?tabs=command-line&pivots=rest-api) or Azure OpenAI Studio | 2023-07-01-preview |
| AZURE_OPENAI_MODEL_MAPPER | This value will correspond to the custom name you chose for your deployment when you deployed a model. This value can be found under **Resource Management** > **Deployments** in the Azure portal or alternatively under **Management** > **Deployments** in Azure OpenAI Studio. | N |

`AZURE_OPENAI_MODEL_MAPPER` is a mapping from Azure OpenAI deployed model names to official OpenAI model names. You can use commas to separate multiple mappings.

Expand All @@ -46,7 +47,7 @@ Azure Deployment Names: **Resource Management** > **Deployments**
**Example:**

````yaml
AZURE_OPENAI_MODEL_MAPPER: gpt-3.5-turbo=azure-gpt-35
AZURE_OPENAI_MODEL_MAPPER: gpt-3.5-turbo=gpt-35-turbo
````

![Screenshot of the overview UI for an OpenAI Resource in the Azure portal with the endpoint & access keys location circled in red.](assets/images/endpoint.png)
Expand Down Expand Up @@ -135,7 +136,7 @@ services:
environment:
AZURE_OPENAI_ENDPOINT: <Azure OpenAI API Endpoint>
AZURE_OPENAI_MODEL_MAPPER: <Azure OpenAI API Deployment Mapper>
AZURE_OPENAI_API_VER: 2023-03-15-preview
AZURE_OPENAI_API_VER: 2023-07-01-preview
networks:
- chatgpt-ns

Expand All @@ -150,3 +151,77 @@ Run:
docker compose up -d
````

### Use ChatGPT-Next-Web

docker-compose.yml

````yaml
version: '3'

services:
chatgpt-web:
image: yidadaa/chatgpt-next-web
ports:
- 3000:3000
environment:
OPENAI_API_KEY: <Azure OpenAI API Key>
BASE_URL: http://azure-openai:8080
CODE: ""
HIDE_USER_API_KEY: 1
HIDE_BALANCE_QUERY: 1
depends_on:
- azure-openai
links:
- azure-openai
networks:
- chatgpt-ns

azure-openai:
image: stulzq/azure-openai-proxy
ports:
- 8080:8080
environment:
AZURE_OPENAI_ENDPOINT: <Azure OpenAI API Endpoint>
AZURE_OPENAI_MODEL_MAPPER: <Azure OpenAI API Deployment Mapper>
# AZURE_OPENAI_MODEL_MAPPER: gpt-4=gpt-4,gpt-3.5-turbo=gpt-35-turbo
AZURE_OPENAI_API_VER: 2023-07-01-preview
networks:
- chatgpt-ns

networks:
chatgpt-ns:
driver: bridge
````

### Use Config File

The configuration file lets you specify a different endpoint, API key, and API version for each model deployment.



config.yaml

````yaml
api_base: "/v1"
deployment_config:
- deployment_name: "xxx"
model_name: "text-davinci-003"
endpoint: "https://xxx-east-us.openai.azure.com/"
api_key: "11111111111"
api_version: "2023-03-15-preview"
- deployment_name: "yyy"
model_name: "gpt-3.5-turbo"
endpoint: "https://yyy.openai.azure.com/"
api_key: "11111111111"
api_version: "2023-03-15-preview"
- deployment_name: "zzzz"
model_name: "text-embedding-ada-002"
endpoint: "https://zzzz.openai.azure.com/"
api_key: "11111111111"
api_version: "2023-03-15-preview"
````



By default, the proxy reads `<workdir>/config.yaml`; you can specify a different path with the `-c` flag, for example `-c /path/to/config.yaml`.

17 changes: 13 additions & 4 deletions azure/init.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"github.com/stulzq/azure-openai-proxy/util"
"log"
"net/url"
"path/filepath"
"strings"
)

Expand All @@ -32,7 +33,7 @@ func Init() error {
openaiModelMapper = viper.GetString(constant.ENV_AZURE_OPENAI_MODEL_MAPPER)
if endpoint != "" && openaiModelMapper != "" {
if apiVersion == "" {
apiVersion = "2023-03-15-preview"
apiVersion = "2023-07-01-preview"
}
InitFromEnvironmentVariables(apiVersion, endpoint, openaiModelMapper)
} else {
Expand Down Expand Up @@ -92,10 +93,16 @@ func InitFromEnvironmentVariables(apiVersion, endpoint, openaiModelMapper string

func InitFromConfigFile() error {
log.Println("Init from config file")
workDir := util.GetWorkdir()
viper.SetConfigName("config")

configFile := viper.GetString("configFile")
if configFile == "" {
configFile = filepath.Join(util.GetWorkdir(), "config.yaml")
} else if !filepath.IsAbs(configFile) {
configFile = filepath.Join(util.GetWorkdir(), configFile)
}

viper.SetConfigType("yaml")
viper.AddConfigPath(fmt.Sprintf("%s/config", workDir))
viper.SetConfigFile(configFile)
if err := viper.ReadInConfig(); err != nil {
log.Printf("read config file error: %+v\n", err)
return err
Expand All @@ -108,5 +115,7 @@ func InitFromConfigFile() error {
for _, configItem := range C.DeploymentConfig {
ModelDeploymentConfig[configItem.ModelName] = configItem
}

log.Println("read config file success")
return nil
}
18 changes: 13 additions & 5 deletions cmd/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ package main

import (
"context"
"flag"
"fmt"
"github.com/spf13/pflag"
"github.com/spf13/viper"
"github.com/stulzq/azure-openai-proxy/azure"
"log"
Expand All @@ -26,7 +26,11 @@ func main() {
viper.AutomaticEnv()
parseFlag()

azure.Init()
err := azure.Init()
if err != nil {
panic(err)
}

gin.SetMode(gin.ReleaseMode)
r := gin.Default()
registerRoute(r)
Expand Down Expand Up @@ -59,9 +63,13 @@ func runServer(srv *http.Server) {
}

func parseFlag() {
ver := flag.Bool("v", false, "version")
flag.Parse()
if *ver {
pflag.StringP("configFile", "c", "config.yaml", "config file")
pflag.BoolP("version", "v", false, "version information")
pflag.Parse()
if err := viper.BindPFlags(pflag.CommandLine); err != nil {
panic(err)
}
if viper.GetBool("v") {
fmt.Println("version:", version)
fmt.Println("buildDate:", buildDate)
fmt.Println("gitCommit:", gitCommit)
Expand Down
17 changes: 17 additions & 0 deletions config/config.example.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
api_base: "/v1"
deployment_config:
- deployment_name: "xxx"
model_name: "text-davinci-003"
endpoint: "https://xxx-east-us.openai.azure.com/"
api_key: "11111111111"
api_version: "2023-03-15-preview"
- deployment_name: "yyy"
model_name: "gpt-3.5-turbo"
endpoint: "https://yyy.openai.azure.com/"
api_key: "11111111111"
api_version: "2023-03-15-preview"
- deployment_name: "zzzz"
model_name: "text-embedding-ada-002"
endpoint: "https://zzzz.openai.azure.com/"
api_key: "11111111111"
api_version: "2023-03-15-preview"

0 comments on commit e3cd102

Please sign in to comment.