diff --git a/docker/.env.example b/docker/.env.example
index 74ab3ef62c..f5bf26aee3 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -27,6 +27,7 @@ CACHE_VECTORS="true"
# LOCAL_AI_BASE_PATH='http://host.docker.internal:8080/v1'
# LOCAL_AI_MODEL_PREF='luna-ai-llama2'
# LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"
###########################################
######## Embedding API SElECTION ##########
diff --git a/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx b/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
index a09a47d7c8..8adad7e50e 100644
--- a/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
@@ -6,12 +6,11 @@ import System from "../../../models/system";
export default function LocalAiOptions({ settings, showAlert = false }) {
const [basePathValue, setBasePathValue] = useState(settings?.LocalAiBasePath);
const [basePath, setBasePath] = useState(settings?.LocalAiBasePath);
- function updateBasePath() {
- setBasePath(basePathValue);
- }
+ const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+ const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
return (
-
+
{showAlert && (
@@ -44,10 +43,14 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
autoComplete="off"
spellCheck={false}
onChange={(e) => setBasePathValue(e.target.value)}
- onBlur={updateBasePath}
+ onBlur={() => setBasePath(basePathValue)}
/>
-
+
+
+
+
+
+
+ Optional API key to use if your LocalAI instance requires API keys.
+
+
+
+
setApiKeyValue(e.target.value)}
+ onBlur={() => setApiKey(apiKeyValue)}
+ />
+
+
);
}
-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);
@@ -81,12 +108,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
return;
}
setLoading(true);
- const { models } = await System.customModels("localai", null, basePath);
+ const { models } = await System.customModels("localai", apiKey, basePath);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
- }, [basePath]);
+ }, [basePath, apiKey]);
if (loading || customModels.length == 0) {
return (
diff --git a/server/.env.example b/server/.env.example
index 03d1eb9bce..f83c5e72a6 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -27,6 +27,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
# LOCAL_AI_BASE_PATH='http://localhost:8080/v1'
# LOCAL_AI_MODEL_PREF='luna-ai-llama2'
# LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"
###########################################
######## Embedding API SElECTION ##########
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index fd07d03c09..22ce8ef107 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -8,7 +8,7 @@ const {
acceptedFileTypes,
} = require("../utils/files/documentProcessor");
const { purgeDocument } = require("../utils/files/purgeDocument");
-const { getVectorDbClass, getLLMProvider } = require("../utils/helpers");
+const { getVectorDbClass } = require("../utils/helpers");
const { updateENV, dumpENV } = require("../utils/helpers/updateENV");
const {
reqBody,
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index ec40cb7f18..7556d8f224 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -103,6 +103,7 @@ const SystemSettings = {
LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
+ LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
// For embedding credentials when localai is selected.
OpenAiKey: !!process.env.OPEN_AI_KEY,
diff --git a/server/utils/AiProviders/localAi/index.js b/server/utils/AiProviders/localAi/index.js
index 616213a236..6c7a3263fb 100644
--- a/server/utils/AiProviders/localAi/index.js
+++ b/server/utils/AiProviders/localAi/index.js
@@ -8,6 +8,11 @@ class LocalAiLLM {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath: process.env.LOCAL_AI_BASE_PATH,
+ ...(!!process.env.LOCAL_AI_API_KEY
+ ? {
+ apiKey: process.env.LOCAL_AI_API_KEY,
+ }
+ : {}),
});
this.openai = new OpenAIApi(config);
this.model = process.env.LOCAL_AI_MODEL_PREF;
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index e5bc1fcfb5..cd6adccafd 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -35,10 +35,11 @@ async function openAiModels(apiKey = null) {
return { models, error: null };
}
-async function localAIModels(basePath = null) {
+async function localAIModels(basePath = null, apiKey = null) {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath,
+ ...(!!apiKey ? { apiKey } : {}),
});
const openai = new OpenAIApi(config);
const models = await openai
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index b7ecffa140..6e0b84970b 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -67,6 +67,10 @@ const KEY_MAPPING = {
envKey: "LOCAL_AI_MODEL_TOKEN_LIMIT",
checks: [nonZero],
},
+ LocalAiApiKey: {
+ envKey: "LOCAL_AI_API_KEY",
+ checks: [],
+ },
EmbeddingEngine: {
envKey: "EMBEDDING_ENGINE",
diff --git a/server/utils/http/index.js b/server/utils/http/index.js
index 5b61236f05..cb57c4a289 100644
--- a/server/utils/http/index.js
+++ b/server/utils/http/index.js
@@ -52,6 +52,13 @@ function multiUserMode(response) {
return response?.locals?.multiUserMode;
}
+function parseAuthHeader(headerValue = null, apiKey = null) {
+ if (headerValue === null || apiKey === null) return {};
+ if (headerValue === "Authorization")
+ return { Authorization: `Bearer ${apiKey}` };
+ return { [headerValue]: apiKey };
+}
+
module.exports = {
reqBody,
multiUserMode,
@@ -59,4 +66,5 @@ module.exports = {
makeJWT,
decodeJWT,
userFromSession,
+ parseAuthHeader,
};
diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js
index c2f0257dd5..0e75fa07fb 100644
--- a/server/utils/vectorDbProviders/chroma/index.js
+++ b/server/utils/vectorDbProviders/chroma/index.js
@@ -15,10 +15,10 @@ const Chroma = {
...(!!process.env.CHROMA_API_HEADER && !!process.env.CHROMA_API_KEY
? {
fetchOptions: {
- headers: {
- [process.env.CHROMA_API_HEADER || "X-Api-Key"]:
- process.env.CHROMA_API_KEY,
- },
+ headers: parseAuthHeader(
+ process.env.CHROMA_API_HEADER || "X-Api-Key",
+ process.env.CHROMA_API_KEY
+ ),
},
}
: {}),