
Commit b94757f: openAI clarifications
1 parent 93274cf

4 files changed (+8, -8 lines)

lab-materials/03/03-01-nb-llm-example.ipynb (+2, -2)

@@ -70,8 +70,8 @@
 "inference_server_url = \"http://llm.ic-shared-llm.svc.cluster.local:8000\"\n",
 "\n",
 "# LLM definition\n",
-"llm = VLLMOpenAI(\n",
-" openai_api_key=\"EMPTY\",\n",
+"llm = VLLMOpenAI( # we are using the vLLM OpenAI-compatible API client. But the Model is running on OpenShift, not OpenAI.\n",
+" openai_api_key=\"EMPTY\", # and that is why we don't need an OpenAI key for this.\n",
 " openai_api_base= f\"{inference_server_url}/v1\",\n",
 " model_name=\"mistralai/Mistral-7B-Instruct-v0.2\",\n",
 " top_p=0.92,\n",

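For reference, the pattern these comments annotate can be reproduced as a standalone snippet. This is a minimal sketch, assuming the langchain_community package is installed and the in-cluster vLLM service above is reachable; the max_tokens value and the final invoke() call are illustrative additions, not part of the notebooks.

# Minimal sketch of the VLLMOpenAI setup annotated by this commit.
# vLLM serves an OpenAI-compatible REST API, so LangChain's OpenAI-style
# client works against it; the key is a placeholder because no OpenAI
# account is involved.
from langchain_community.llms import VLLMOpenAI

inference_server_url = "http://llm.ic-shared-llm.svc.cluster.local:8000"

llm = VLLMOpenAI(
    openai_api_key="EMPTY",                        # vLLM ignores the key, but the client requires one
    openai_api_base=f"{inference_server_url}/v1",  # OpenAI-style routes are exposed under /v1
    model_name="mistralai/Mistral-7B-Instruct-v0.2",
    top_p=0.92,
    max_tokens=512,                                # assumed value, not from the notebooks
)

# Illustrative usage: LangChain LLMs expose invoke() and return plain text.
print(llm.invoke("In one sentence, what does an OpenAI-compatible API mean here?"))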
lab-materials/03/03-02-summarization.ipynb (+2, -2)

@@ -66,8 +66,8 @@
 "inference_server_url = \"http://llm.ic-shared-llm.svc.cluster.local:8000\"\n",
 "\n",
 "# LLM definition\n",
-"llm = VLLMOpenAI(\n",
-" openai_api_key=\"EMPTY\",\n",
+"llm = VLLMOpenAI( # we are using the vLLM OpenAI-compatible API client. But the Model is running on OpenShift, not OpenAI.\n",
+" openai_api_key=\"EMPTY\", # and that is why we don't need an OpenAI key for this.\n",
 " openai_api_base= f\"{inference_server_url}/v1\",\n",
 " model_name=\"mistralai/Mistral-7B-Instruct-v0.2\",\n",
 " top_p=0.92,\n",

lab-materials/03/03-03-information-extraction.ipynb (+2, -2)

@@ -66,8 +66,8 @@
 "inference_server_url = \"http://llm.ic-shared-llm.svc.cluster.local:8000\"\n",
 "\n",
 "# LLM definition\n",
-"llm = VLLMOpenAI(\n",
-" openai_api_key=\"EMPTY\",\n",
+"llm = VLLMOpenAI( # we are using the vLLM OpenAI-compatible API client. But the Model is running on OpenShift, not OpenAI.\n",
+" openai_api_key=\"EMPTY\", # and that is why we don't need an OpenAI key for this.\n",
 " openai_api_base= f\"{inference_server_url}/v1\",\n",
 " model_name=\"mistralai/Mistral-7B-Instruct-v0.2\",\n",
 " top_p=0.92,\n",

lab-materials/03/03-04-comparing-model-servers.ipynb (+2, -2)

@@ -66,8 +66,8 @@
 "inference_server_url = \"http://llm.ic-shared-llm.svc.cluster.local:8000\"\n",
 "\n",
 "# LLM definition\n",
-"llm = VLLMOpenAI(\n",
-" openai_api_key=\"EMPTY\",\n",
+"llm = VLLMOpenAI( # we are using the vLLM OpenAI-compatible API client. But the Model is running on OpenShift, not OpenAI.\n",
+" openai_api_key=\"EMPTY\", # and that is why we don't need an OpenAI key for this.\n",
 " openai_api_base= f\"{inference_server_url}/v1\",\n",
 " model_name=\"mistralai/Mistral-7B-Instruct-v0.2\",\n",
 " top_p=0.92,\n",
