From 7cea8f05d2a4679effac3df98415bbb3574ce268 Mon Sep 17 00:00:00 2001
From: Cedric
Date: Sat, 6 Apr 2024 16:18:53 +0200
Subject: [PATCH] feat: add new parameter to pass models config files

---
 .../templates/configmap-models-configs.yaml | 11 ++++++++
 charts/local-ai/templates/deployment.yaml   | 28 +++++++++++++++++++
 charts/local-ai/values.yaml                 | 22 +++++++++++++++
 3 files changed, 61 insertions(+)
 create mode 100644 charts/local-ai/templates/configmap-models-configs.yaml

diff --git a/charts/local-ai/templates/configmap-models-configs.yaml b/charts/local-ai/templates/configmap-models-configs.yaml
new file mode 100644
index 0000000..bc55d62
--- /dev/null
+++ b/charts/local-ai/templates/configmap-models-configs.yaml
@@ -0,0 +1,11 @@
+{{- if .Values.modelsConfigs -}}
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: {{ template "local-ai.fullname" . }}-models-configs
+data:
+{{- range $key, $val := .Values.modelsConfigs }}
+  {{ $key }}: |-
+{{ $val | indent 4 }}
+{{- end }}
+{{- end -}}
diff --git a/charts/local-ai/templates/deployment.yaml b/charts/local-ai/templates/deployment.yaml
index d8dcbe4..3435e5a 100644
--- a/charts/local-ai/templates/deployment.yaml
+++ b/charts/local-ai/templates/deployment.yaml
@@ -96,6 +96,31 @@ spec:
             {{- end }}
             {{- end }}
         {{- end }}
+        {{- if .Values.modelsConfigs }}
+        - name: models-configs
+          image: {{ .Values.deployment.prompt_templates.image }}
+          imagePullPolicy: {{ .Values.deployment.pullPolicy }}
+          command: ["/bin/sh", "-c"]
+          args:
+            - |
+              for file in /models-configs/*; do
+                filename=$(basename "$file")
+                if [ "${filename%.yaml}" = "$filename" ]; then
+                  cp -fL "$file" "/models/$filename.yaml"
+                else
+                  cp -fL "$file" "/models/$filename"
+                fi
+              done
+          volumeMounts:
+            - mountPath: /models-configs
+              name: models-configs
+            {{- range $key, $pvc := $rootPersistence }}
+            {{- if $pvc.enabled }}
+            - name: {{ $key }}
+              mountPath: {{ $pvc.globalMount | default (print "/" $key) }}
+            {{- end }}
+            {{- end }}
+        {{- end }}
         - name: download-model
           image: {{ .Values.deployment.download_model.image }}
           imagePullPolicy: {{ .Values.deployment.pullPolicy }}
@@ -261,6 +286,9 @@ spec:
         - name: prompt-templates
           configMap:
             name: {{ template "local-ai.fullname" . }}-prompt-templates
+        - name: models-configs
+          configMap:
+            name: {{ template "local-ai.fullname" . }}-models-configs
       {{- end }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
diff --git a/charts/local-ai/values.yaml b/charts/local-ai/values.yaml
index b1dbbe1..d8830df 100644
--- a/charts/local-ai/values.yaml
+++ b/charts/local-ai/values.yaml
@@ -34,6 +34,28 @@ resources:
   #   cpu: 100m
   #   memory: 128Mi
 
+# Model config files to include
+modelsConfigs:
+  {}
+  # phi-2: |
+  #   name: phi-2
+  #   context_size: 2048
+  #   f16: true
+  #   mmap: true
+  #   trimsuffix:
+  #   - "\n"
+  #   parameters:
+  #     model: phi-2.Q8_0.gguf
+  #     temperature: 0.2
+  #     top_k: 40
+  #     top_p: 0.95
+  #     seed: -1
+  #   template:
+  #     chat: &template |-
+  #       Instruct: {{.Input}}
+  #       Output:
+  #     completion: *template
+
 # Prompt templates to include
 # Note: the keys of this map will be the names of the prompt template files
 promptTemplates:
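
Usage sketch for reviewers: the names below (my-values.yaml, the "local-ai" release name) are illustrative and not defined by this patch. Each key of modelsConfigs becomes a data key in the generated ConfigMap and ends up as a file under /models; the init container appends ".yaml" when the key does not already carry that suffix.

    # my-values.yaml (hypothetical override file)
    modelsConfigs:
      phi-2: |
        name: phi-2
        context_size: 2048
        parameters:
          model: phi-2.Q8_0.gguf

    # Render or install with the override applied, for example:
    #   helm template local-ai charts/local-ai -f my-values.yaml
    #   helm upgrade --install local-ai charts/local-ai -f my-values.yaml

The chart path charts/local-ai matches this repository's layout; a published chart reference would work the same way.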