Skip to content

Commit

Permalink
patch: add API key to localai service calls (#421)
Browse files Browse the repository at this point in the history
connect #417
  • Loading branch information
timothycarambat authored Dec 11, 2023
1 parent fef5169 commit cba6615
Show file tree
Hide file tree
Showing 4 changed files with 73 additions and 40 deletions.
103 changes: 66 additions & 37 deletions frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,51 +6,80 @@ export default function LocalAiOptions({ settings }) {
settings?.EmbeddingBasePath
);
const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
function updateBasePath() {
setBasePath(basePathValue);
}
const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);

return (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={updateBasePath}
required={true}
autoComplete="off"
spellCheck={false}
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<LocalAIModelSelection
settings={settings}
apiKey={apiKey}
basePath={basePath}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="1000"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
</div>
</div>
<LocalAIModelSelection settings={settings} basePath={basePath} />
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="1000"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold block">
Local AI API Key
</label>
<p className="text-xs italic text-white/60">
optional API key to use if running LocalAI with API keys.
</p>
</div>

<input
type="password"
name="LocalAiApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
</div>
</>
);
}

function LocalAIModelSelection({ settings, basePath = null }) {
function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);

Expand All @@ -62,12 +91,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
return;
}
setLoading(true);
const { models } = await System.customModels("localai", null, basePath);
const { models } = await System.customModels("localai", apiKey, basePath);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, [basePath]);
}, [basePath, apiKey]);

if (loading || customModels.length == 0) {
return (
Expand Down
3 changes: 1 addition & 2 deletions server/models/systemSettings.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ const SystemSettings = {
EmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
EmbeddingModelMaxChunkLength:
process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
...(vectorDB === "pinecone"
? {
PineConeEnvironment: process.env.PINECONE_ENVIRONMENT,
Expand Down Expand Up @@ -98,13 +99,11 @@ const SystemSettings = {
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
}
: {}),

...(llmProvider === "localai"
? {
LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,

// For embedding credentials when localai is selected.
OpenAiKey: !!process.env.OPEN_AI_KEY,
Expand Down
5 changes: 5 additions & 0 deletions server/utils/EmbeddingEngines/localAi/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,11 @@ class LocalAiEmbedder {
throw new Error("No embedding model was set.");
const config = new Configuration({
basePath: process.env.EMBEDDING_BASE_PATH,
...(!!process.env.LOCAL_AI_API_KEY
? {
apiKey: process.env.LOCAL_AI_API_KEY,
}
: {}),
});
this.openai = new OpenAIApi(config);

Expand Down
2 changes: 1 addition & 1 deletion server/utils/helpers/customModels.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
case "openai":
return await openAiModels(apiKey);
case "localai":
return await localAIModels(basePath);
return await localAIModels(basePath, apiKey);
case "native-llm":
return nativeLLMModels();
default:
Expand Down

0 comments on commit cba6615

Please sign in to comment.