###################################
# The following are used by objects to set default values
# and are used in notebooks.
# These values can always be overwritten in constructors.
###################################
########################
# In most of our notebook examples, an `OpenAIChatTarget` is used.
# This can be initialized for Azure (by default) or OpenAI.
# The following are used to retrieve the key for either.
########################
# To get credentials for OpenAI, see
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# This key is used for all OpenAI models (including DALL-E, GPT-3, GPT-4, etc.)
# The different models can be specified in OPENAI_DEPLOYMENT or in the target constructor
OPENAI_KEY="sk-xxxxxxxxx"
OPENAI_DEPLOYMENT="default_model_name"
# To get credentials go to
# ms.portal.azure.com > Cognitive Services > Azure OpenAI > Keys and Endpoint
# This serves as the default for Azure OpenAI chat models (used throughout demo notebooks). Adjust as needed.
AZURE_OPENAI_CHAT_ENDPOINT="https://endpoint.openai.azure.com/"
AZURE_OPENAI_CHAT_KEY="<Provide Azure OpenAI key here>"
AZURE_OPENAI_CHAT_DEPLOYMENT="<Provide Azure OpenAI chat deployment name here>"
# Set the optional OPENAI_CHAT_ADDITIONAL_REQUEST_HEADERS to include additional HTTP headers in dictionary format for API requests, e.g., {'key1': 'value1'}.
# This applies to both Azure OpenAI and OpenAI endpoints.
OPENAI_CHAT_ADDITIONAL_REQUEST_HEADERS="<Provide headers using a dictionary format. Ex., {'key1':'value1'}>"
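# Example of the expected dictionary format (the header name and value below are illustrative only, not required by any endpoint):
# OPENAI_CHAT_ADDITIONAL_REQUEST_HEADERS="{'Ocp-Apim-Subscription-Key': 'my-subscription-key'}"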
##########
# Other default target credentials
##########
# To get credentials go to
# ms.portal.azure.com > Azure ML workspace > Endpoints
# This serves as the default for Azure Machine Learning models. Adjust as needed.
AZURE_ML_KEY="<Provide Azure ML API key>"
AZURE_ML_MANAGED_ENDPOINT="https://aml-endpoint.inference.ml.azure.com/score"
# Hugging Face Token
HUGGINGFACE_TOKEN="<Provide Hugging Face token here>"
# To get credentials go to Azure OpenAI Studio -> Deployments
AZURE_OPENAI_DALLE_ENDPOINT="<DALLE ENDPOINT HERE>"
AZURE_OPENAI_DALLE_API_KEY="<API KEY>"
AZURE_OPENAI_DALLE_DEPLOYMENT="<deployment name>"
# To get credentials go to Azure OpenAI Studio -> Deployments
AZURE_OPENAI_TTS_ENDPOINT="<TTS ENDPOINT HERE>"
AZURE_OPENAI_TTS_KEY="<API KEY>"
AZURE_OPENAI_TTS_DEPLOYMENT="<deployment name>"
# To get keys, go to Azure Portal -> Azure Cognitive Services -> Speech -> Keys and Endpoint
AZURE_SPEECH_REGION="<region>"
AZURE_SPEECH_KEY="<API KEY>"
# Azure Content Safety Configuration
AZURE_CONTENT_SAFETY_API_KEY="<Provide Azure Content Safety API key here>"
AZURE_CONTENT_SAFETY_API_ENDPOINT="<Provide Azure Content Safety endpoint here>"
# Crucible API Key. You can get yours at: https://crucible.dreadnode.io/login
CRUCIBLE_API_KEY="<Provide Crucible API key here>"
#################
# The following are used for Azure SQL memory, which can be configured in place of DuckDB
#################
# Azure SQL Server
AZURE_SQL_SERVER_CONNECTION_STRING="<Provide Azure SQL Server connection string here>"
# Azure SQL DB connection string (SQLAlchemy format) and Azure Storage Account for storing blob objects
AZURE_SQL_DB_CONNECTION_STRING="<Provide Azure SQL DB connection string here in SQLAlchemy format>"
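# A minimal sketch of the SQLAlchemy connection string format (server, database, user, password, and driver names below are placeholders):
# AZURE_SQL_DB_CONNECTION_STRING="mssql+pyodbc://<user>:<password>@<server>.database.windows.net:1433/<database>?driver=ODBC+Driver+18+for+SQL+Server"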
AZURE_STORAGE_ACCOUNT_RESULTS_CONTAINER_URL="<Azure Storage Account results container URL>"
AZURE_STORAGE_ACCOUNT_RESULTS_SAS_TOKEN="<Azure Storage Account results container SAS token>"
# To find these values go to
# ms.portal.azure.com > Storage Account > Storage Browser > Blob containers > ... > Container Properties > URL
# ms.portal.azure.com > Storage Account > Storage Browser > Blob containers > ... > Generate SAS > Blob SAS token
AZURE_STORAGE_ACCOUNT_CONTAINER_URL="https://<storage-account-name>.blob.core.windows.net/<container-name>"
AZURE_STORAGE_ACCOUNT_SAS_TOKEN="<Provide Container SAS Token>"
##############
# The following are not used by default, but can be set here for convenience if you have separate Azure ML model deployments.
# These can be named arbitrarily and are just suggestions people can use when initializing different deployments
##############
AZURE_ML_PHI_3_5_MOE_INSTRUCT_KEY="<Provide Azure ML API key>"
AZURE_ML_PHI_3_5_MOE_INSTRUCT_MANAGED_ENDPOINT="https://aml-endpoint.inference.ml.azure.com/score"
AZURE_ML_MISTRALAI_MIXTRAL_8X7B_INSTRUCT_V01_KEY="<Provide Azure ML API key>"
AZURE_ML_MISTRALAI_MIXTRAL_8X7B_INSTRUCT_V01_MANAGED_ENDPOINT="https://aml-endpoint.inference.ml.azure.com/score"
AZURE_ML_LLAMA_3_1_8B_KEY="<Provide Azure ML API key>"
AZURE_ML_LLAMA_3_1_8B_MANAGED_ENDPOINT="https://aml-endpoint.inference.ml.azure.com/score"
##############
# The following are not used by default, but can be set here for convenience as separate Azure OpenAI chat deployments (in addition to AZURE_OPENAI_CHAT_DEPLOYMENT).
# These can be named arbitrarily and are just suggestions people can use when initializing different deployments.
##############
AZURE_SCORER_ENDPOINT="https://endpoint.openai.azure.com/"
AZURE_SCORER_CHAT_KEY="<Provide Azure OpenAI key here>"
AZURE_SCORER_DEPLOYMENT="<Provide Azure OpenAI chat deployment name used for scoring here>"
AZURE_OPENAI_GPTV_CHAT_ENDPOINT="https://endpoint.openai.azure.com/"
AZURE_OPENAI_GPTV_CHAT_KEY="<Provide Azure OpenAI key here>"
AZURE_OPENAI_GPTV_CHAT_DEPLOYMENT="<Provide Azure OpenAI GPT-V chat deployment name here>"
AZURE_OPENAI_GPT4O_CHAT_ENDPOINT="https://endpoint.openai.azure.com/"
AZURE_OPENAI_GPT4O_CHAT_KEY="<Provide Azure OpenAI key here>"
AZURE_OPENAI_GPT4O_CHAT_DEPLOYMENT="<Provide Azure OpenAI GPT-4o chat deployment name here>"
AZURE_OPENAI_GPT4_CHAT_ENDPOINT="https://endpoint.openai.azure.com/"
AZURE_OPENAI_GPT4_CHAT_KEY="<Provide Azure OpenAI key here>"
AZURE_OPENAI_GPT4_CHAT_DEPLOYMENT="<Provide Azure OpenAI GPT-4 chat deployment name here>"
AZURE_OPENAI_COMPLETION_ENDPOINT="https://completionendpoint.openai.azure.com/"
AZURE_OPENAI_COMPLETION_KEY="<Provide Azure OpenAI completion key here>"
AZURE_OPENAI_COMPLETION_DEPLOYMENT="completion_deployment_name"
AZURE_OPENAI_EMBEDDING_ENDPOINT="https://embeddingendpoint.openai.azure.com/"
AZURE_OPENAI_EMBEDDING_KEY="<Provide Azure OpenAI embedding key here>"
AZURE_OPENAI_EMBEDDING_DEPLOYMENT="<Provide Azure OpenAI embedding deployment name here>"
##############
# Memory labels are a free-form dictionary for tagging prompts with custom labels.
# These labels can be used to query for and score specific groupings of prompts sent as part of an operation.
# Users can define any key-value pairs according to their needs. The below GLOBAL_MEMORY_LABELS will be
# applied to all prompts sent via orchestrators and can be altered whenever needed.
# Example recommended labels are shown below: `username`, `op_name`. Others that may be useful include:
# `language`, `harm_category`, `stage`, or `technique`. These can be added as needed for convenient database queries/scoring.
##############
GLOBAL_MEMORY_LABELS={"username": "myusername", "op_name": "myop"}
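# Example with additional labels drawn from the suggestions above (keys follow the list in the comment; values are illustrative only):
# GLOBAL_MEMORY_LABELS={"username": "myusername", "op_name": "myop", "harm_category": "violence", "language": "en", "technique": "jailbreak"}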