;;; my-assistants --- integration of LLMs and similar tools
;;
;;; Commentary:
;;
;; This is the configuration of various digital assistants namely LLMs
;; and other similar tools.
;;
;;; Code:
;; AI!
;; pcsv is a pure-elisp CSV parser.
;; NOTE(review): presumably a dependency of one of the assistant
;; packages configured below — confirm before removing.
(use-package pcsv
  :ensure t)
;;
;; llm provides an abstraction for various local and remote LLM APIs
;; which is then used by the slowly growing number of front-ends there
;; are.
;;
(use-package llm
  :ensure t)
(use-package llm-openai
  ;; Pre-configured OpenAI provider objects for the llm front-ends
  ;; (e.g. ellama below).  The API key is read from pass via
  ;; `my-pass-password' each time a provider is constructed.
  :config (setq my-gpt4
                (make-llm-openai :key (my-pass-password "api.openai.com")
                                 :chat-model "gpt-4o")
                ;; cheaper version of gpt4
                my-gpt4-mini
                (make-llm-openai :key (my-pass-password "api.openai.com")
                                 :chat-model "gpt-4o-mini")
                ;; aliases to the latest chatgpt model
                my-chatgpt
                (make-llm-openai :key (my-pass-password "api.openai.com")
                                 :chat-model "chatgpt-4o-latest")
                ;; broad general reasoning model, expensive
                my-openai-o1
                (make-llm-openai :key (my-pass-password "api.openai.com")
                                 :chat-model "o1")
                ;; cheaper version of o1 focused on coding, math, science
                my-openai-o1-mini
                (make-llm-openai :key (my-pass-password "api.openai.com")
                                 :chat-model "o1-mini")))
(use-package llm-ollama
  ;; Local models served by an Ollama instance; no API key needed.
  :config (setq my-ollama-codellama
                (make-llm-ollama :chat-model "codellama")
                my-ollama-mistral
                ;; Fix: the model is named "mistral" — the previous
                ;; "mistrel" is not a model in the Ollama library, so
                ;; this provider could never resolve a model.
                (make-llm-ollama :chat-model "mistral")))
(use-package llm-gemini
  ;; Google Gemini (and Gemma) provider objects; key fetched from pass.
  :config (setq my-gemini-llm
                ;; fast, low-cost default
                (make-llm-gemini :key (my-pass-password "api.gemini.google.com")
                                 :chat-model
                                 "gemini-1.5-flash")
                ;; larger, more capable model
                my-gemini-pro-llm
                (make-llm-gemini :key (my-pass-password "api.gemini.google.com")
                                 :chat-model "gemini-1.5-pro-latest")
                ;; experimental 2.0 flash model
                my-gemini-2-flash-llm
                (make-llm-gemini :key (my-pass-password "api.gemini.google.com")
                                 :chat-model "gemini-2.0-flash-exp")
                ;; open-weights Gemma 2 served through the same API
                my-gemma2-llm
                (make-llm-gemini :key (my-pass-password "api.gemini.google.com")
                                 :chat-model "gemma-2-27b-it")))
;;
;; The OG ChatGpt integration
;;
(defun my-project-name (project)
  "Return the base name of PROJECT's root directory.
PROJECT is a cons cell whose cdr is the project directory, as used
elsewhere in this file with `project-current'.  Any extension-like
suffix (e.g. \".git\") is stripped from the directory name."
  ;; `file-name-base' strips both the directory part and the extension
  ;; in one call; drop the trailing slash first so the last path
  ;; component is treated as a file name.
  (file-name-base (directory-file-name (cdr project))))
(defun my-define-project-prompt ()
  "Define a ChatGPT system prompt tailored for the current project.
Builds a prompt mentioning the project's directory and origin URL and
registers it in `chatgpt-shell-system-prompts' under a title derived
from the project name."
  (let* ((project (project-current t))
         (proj-name (my-project-name project))
         ;; The cdr of the project object is its root directory (same
         ;; convention as `my-project-name' above).
         (proj-dir (cdr project))
         (origin-url (magit-get "remote" "origin" "url"))
         (prompt-title (format "Project assistant for %s" proj-name))
         (prompt (concat
                  "Your goal is to help the user working on the current project.\n"
                  ;; Fix: this slot says "directory" so pass the actual
                  ;; directory, not the bare project name.
                  (format "the project is in the directory %s and its origin repo is %s\n" proj-dir origin-url)
                  "You are positive and encouraging.\n"
                  "You do not repeat obvious things, including their query.\n"
                  "You are as concise in responses. You always guide the user go one level deeper and help them see patterns.\n"
                  "You never apologise for confusions because it would waste their time.\n"
                  "You use markdown liberally to structure responses.\n"
                  "Always show code snippets in markdown blocks with language labels.\n"
                  "Don't explain code snippets.\n"
                  "Whenever you output updated code for the user, only show diffs,instead of entire snippets.\n"
                  "You can assume the user is running on Debian Linux and is using Emacs as their editor\n")))
    (add-to-list 'chatgpt-shell-system-prompts `(,prompt-title . ,prompt))))
(use-package chatgpt-shell
  ;; Loaded from a local checkout when present.
  :load-path (lambda () (my-return-path-if-ok
                         "~/src/emacs/chatgpt-shell.git/"))
  :config (setq
           chatgpt-shell-chatgpt-model-version "gpt-4"
           ;; Fix: use a real lambda rather than a quoted list
           ;; ('(lambda ...)) — a quoted lambda is just data, cannot be
           ;; byte-compiled, and is a long-standing elisp anti-pattern.
           chatgpt-shell-openai-key (lambda () (my-pass-password "api.openai.com"))))
; "code-davinci-edit-001"
; gpt-3.5-turbo is the cheaper faster one
(use-package dall-e-shell
  ;; Ships in the same local checkout as chatgpt-shell.
  :load-path (lambda () (my-return-path-if-ok
                         "~/src/emacs/chatgpt-shell.git/"))
  ;; Fix: unquoted lambda instead of '(lambda ...) so the key function
  ;; can be byte-compiled (same anti-pattern as in chatgpt-shell).
  :config (setq dall-e-shell-openai-key (lambda () (my-pass-password "api.openai.com"))))
;;
;; Ellama
;;
(use-package ellama
  :ensure t
  ;; `setopt' (not `setq') so customize setters run for these options.
  ;; The provider objects referenced here are defined above in the
  ;; llm-openai / llm-gemini :config blocks.
  :init (setopt ellama-language "English"
                ;; default provider when none is chosen interactively
                ellama-provider my-gemini-llm
                ;; selectable providers; this order is what the
                ;; completion UI presents
                ellama-providers
                '(
                  ;; Google Gemini Models
                  ("Gemini Pro" . my-gemini-pro-llm)
                  ("Gemini" . my-gemini-llm)
                  ("Gemini 2 Flash (experimental)" . my-gemini-2-flash-llm)
                  ("Gemma2" . my-gemma2-llm)
                  ;; OpenAI Models
                  ("ChatGPT (latest)" . my-chatgpt)
                  ("OpenAI GPT4o" . my-gpt4)
                  ("OpenAI GPT4o-mini" . my-gpt4-mini)
                  ("OpenAI o1" . my-openai-o1)
                  ("OpenAI o1-mini" . my-openai-o1-mini))
                ;; all ellama commands live under this prefix
                ellama-keymap-prefix "C-c C-l l"))
;;
;; Codeium
;;;
;; https://github.com/Exafunction/codeium.el/issues/97#issuecomment-2354092579
(defun my-codeium-wrapper ()
  "Run the Codeium completion function on its own.
Wrapping `codeium-completion-at-point' in `cape-interactive'
decouples Codeium from the other completion-at-point functions, per
the workaround linked in the comment above."
  (interactive)
  (cape-interactive #'codeium-completion-at-point))
(use-package codeium
  ;; Loaded from a local checkout when present.
  :load-path (lambda () (my-return-path-if-ok
                         "~/src/emacs/codeium.el.git/"))
  ;; Completion is on-demand via the wrapper, not a capf hook.
  :bind (:map prog-mode-map
              ("C-x c" . my-codeium-wrapper))
  :config
  (setq codeium/metadata/api_key (my-pass-password "api.codeium.com")
        ;; keep the mode-line quiet for housekeeping API calls
        codeium-mode-line-enable (lambda (api)
                                   (not (memq api '(CancelRequest Heartbeat AcceptCompletion)))))
  (add-to-list 'mode-line-format '(:eval (car-safe codeium-mode-line)) t)
  ;; alternatively for a more extensive mode-line
  ;; (add-to-list 'mode-line-format '(-50 "" codeium-mode-line) t)
  ;; use M-x codeium-diagnose to see apis/fields that would be sent to the local language server
  (setq codeium-api-enabled
        (lambda (api)
          (memq api '(GetCompletions Heartbeat CancelRequest GetAuthToken RegisterUser auth-redirect AcceptCompletion))))
  ;; You can overwrite all the codeium configs!
  ;; for example, we recommend limiting the string sent to codeium for better performance
  (defun my-codeium/document/text ()
    "Return a window of buffer text around point (3000 bytes back, 1000 forward)."
    (buffer-substring-no-properties (max (- (point) 3000) (point-min)) (min (+ (point) 1000) (point-max))))
  ;; if you change the text, you should also change the cursor_offset
  ;; warning: this is measured by UTF-8 encoded bytes
  (defun my-codeium/document/cursor_offset ()
    "Return point's offset within the window produced by `my-codeium/document/text'."
    (codeium-utf8-byte-length
     (buffer-substring-no-properties (max (- (point) 3000) (point-min)) (point))))
  (setq codeium/document/text 'my-codeium/document/text
        codeium/document/cursor_offset 'my-codeium/document/cursor_offset))
(provide 'my-assistants)
;;; my-assistants.el ends here