Commit
Added initial files after removing my license info.
Showing 8 changed files with 564 additions and 0 deletions.
@@ -0,0 +1,112 @@
(in-package :synergy)

(defvar *function-schemas* (make-hash-table :test 'equal))

(defun get-schema (function-name)
  "Gets the JSON schema registered under FUNCTION-NAME; the symbol might not name a user-defined function."
  (gethash function-name *function-schemas*))
(defmacro ai-defun (name args description &body body)
  "### `ai-defun`

#### What It Does

The `ai-defun` macro defines a function and simultaneously stores a corresponding JSON schema in a hash table. The schema records the function's name, description, and parameters, including their types and any constraints. It is intended for passing the function to OpenAI's function-calling interface.

#### How to Use It

Use `ai-defun` in place of `defun` when you want to define a function and generate a JSON schema for it:

```lisp
(ai-defun name args description &body body)
```

- `name`: The name of the function.
- `args`: A list of argument specifications. Each specification is a list of the argument name, its type (e.g. \"integer\", \"string\", or a list of allowed string values, which becomes an enum), a textual description, and a required flag.
- `description`: A textual description of what the function does.
- `&body body`: The actual code of the function.

#### Example

```lisp
(ai-defun order-burger
    ((num-burgers \"integer\" \"number of burgers to order.\" t)
     (fries (\"small\" \"medium\" \"large\" \"none\") \"Would you like fries with that?\" nil))
  \"Order a burger with optional fries.\"
  (format t \"Ordering ~a burgers and ~a fries.~%\" num-burgers fries))
```

This defines a function `order-burger` and stores the corresponding JSON schema in the hash table. The function takes two keyword parameters: `num-burgers`, an integer giving the number of burgers to order, and `fries`, an enumerated string giving the size of fries. Only `num-burgers` is marked as required.

`ai-defun` simplifies documenting a function's API, making it easier to integrate with systems that consume the JSON Schema format."
  (let ((gname name)
        (gargs args)
        (gdesc description))
    `(progn
       (setf (gethash ',gname *function-schemas*)
             (create-json-schema (symbol-name ',gname) ',gdesc ',gargs))

       (setf (symbol-function ',gname)
             (lambda (&key ,@(loop for (name . rest) in gargs
                                   collect name))
               ,gdesc
               ,@(loop for i in gargs
                       when (fourth i)
                         collect `(unless ,(first i)
                                    (error (format nil "Missing required value ~S!" ',(first i)))))
               ,@body)))))
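
;; Illustrative usage sketch (assuming the `order-burger' definition from the
;; docstring above has been evaluated): the generated function takes keyword
;; arguments, and its schema can be looked up by the function's symbol.
;;
;;   (order-burger :num-burgers 2 :fries "large")
;;   (get-schema 'order-burger)   ; => the alist built by CREATE-JSON-SCHEMA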

(defun argument-to-json-property (arg)
  (let* ((name (first arg))
         (type (second arg))
         (description (third arg)))
    (if (listp type)
        `((,(string-downcase (symbol-name name))
           (:TYPE . "string")
           (:ENUM . ,type)
           (:DESCRIPTION . ,description)))
        `((,(string-downcase (symbol-name name))
           (:TYPE . ,type)
           (:DESCRIPTION . ,description))))))
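
;; Illustrative examples (hypothetical argument specs): a string type maps
;; directly to :TYPE, while a list of strings is emitted as a string enum.
;;
;;   (argument-to-json-property '(location "string" "City and state"))
;;   ;; => (("location" (:TYPE . "string") (:DESCRIPTION . "City and state")))
;;
;;   (argument-to-json-property '(unit ("celsius" "fahrenheit") "Temperature unit"))
;;   ;; => (("unit" (:TYPE . "string") (:ENUM "celsius" "fahrenheit")
;;   ;;     (:DESCRIPTION . "Temperature unit")))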

(defun create-json-schema (function-name description parameters)
  "Creates a JSON Schema for a given function.

**Parameters:**
- `function-name` (string): The name of the function.
- `description` (string): A textual description of the function's purpose.
- `parameters` (list): A list of the function's parameters. Each parameter is represented
  by a list containing the following elements:
    - Name (symbol): The parameter's name (downcased in the schema).
    - Type (string or list): The parameter's type (e.g. \"string\", \"integer\"), or a list of
      allowed string values, which is emitted as an enum.
    - Description (string): A textual description of the parameter.
    - Required (boolean): `T` if the parameter is required, `NIL` otherwise.

**Returns:**
- An association list representing the JSON Schema for the function, ready to be serialized to JSON.

**Example Usage:**
```lisp
(create-json-schema \"get_current_weather\"
                    \"Get the current weather in a given location\"
                    '((location \"string\" \"The city and state, e.g. San Francisco, CA\" t)
                      (unit (\"celsius\" \"fahrenheit\") \"The temperature unit to use\" nil)))
```"
  (let* ((params)
         (required))
    (loop for i in parameters
          do (let ((prop (argument-to-json-property i)))
               (setf params (append prop params))
               (when (fourth i)
                 (setf required (cons (caar prop) required)))))
    `((:NAME . ,function-name)
      (:DESCRIPTION . ,description)
      (:PARAMETERS (:TYPE . "object")
                   (:PROPERTIES . ,(or (reverse params) (make-hash-table)))
                   ,@(when required
                       `((:REQUIRED . ,required)))))))
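
;; Illustrative result shape for the docstring example above (abbreviated):
;;
;;   ((:NAME . "get_current_weather")
;;    (:DESCRIPTION . "Get the current weather in a given location")
;;    (:PARAMETERS (:TYPE . "object")
;;                 (:PROPERTIES ("location" (:TYPE . "string") (:DESCRIPTION . "..."))
;;                              ("unit" (:TYPE . "string") (:ENUM "celsius" "fahrenheit")
;;                                      (:DESCRIPTION . "...")))
;;                 (:REQUIRED "location")))
;;
;; The alist can then be handed to whatever JSON encoder the rest of the system
;; uses before being sent to the API.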

@@ -0,0 +1,101 @@
(in-package :synergy)

(defun get-model-chat? (alist)
  "Return the :chat flag from a model entry ALIST."
  (cdr (assoc :chat alist)))

(defun get-model-function (alist)
  "Return the API function for a model entry: chat models go through
OPENAI-CHAT, everything else through OPENAI-COMPLETE."
  (if (cdr (assoc :chat alist))
      #'openai-chat
      #'openai-complete))

(defun get-model-data (alist &optional temp)
  "Return the :model parameter alist from a model entry, setting its
:temperature to TEMP (via SET-ASSOC) when TEMP is non-NIL."
  (let ((model (cdr (assoc :model alist))))
    (when temp
      (set-assoc model :temperature temp))
    model))
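
;; Illustrative dispatch examples (hypothetical model entries): only entries
;; with a true :chat value are routed to the chat endpoint.
;;
;;   (get-model-function '((:chat . t)))          ; => #'OPENAI-CHAT
;;   (get-model-function '((:modelName . "ada"))) ; => #'OPENAI-COMPLETE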

(defun find-best-models (tokenSize &key (smartness nil) (temp 0) (functions nil) (chat))
  ;; Keep the models whose context window can hold TOKENSIZE and which satisfy the
  ;; optional SMARTNESS, FUNCTIONS, and CHAT requirements, then sort them by input
  ;; cost (cheapest first, longer model names first on ties) and pair each entry's
  ;; parameter alist with the function used to call it.
  (map 'list (lambda (x)
               (cons (get-model-data x temp)
                     (get-model-function x)))
       (sort
        (loop for i in *models*
              if (and (<= tokenSize (cdr (assoc :max--tokens i)))
                      (or (null functions)
                          (cdr (assoc :functions i)))
                      (or (null chat)
                          (cdr (assoc :chat i)))
                      (or (null smartness)
                          (<= smartness (cdr (assoc :smartness i)))))
                collect i)
        (lambda (a b)
          (let ((cost-a (cdr (assoc :costInput a)))
                (cost-b (cdr (assoc :costInput b))))
            (if (= cost-a cost-b)
                (> (length (cdr (assoc :modelName a)))
                   (length (cdr (assoc :modelName b))))
                (< cost-a cost-b)))))))

(defun find-best-model (tokenSize &key (smartness nil) (temp 0) (functions nil) (chat))
  "Return two values: the parameter alist of the cheapest model satisfying the
constraints, and the function to call it with."
  (let ((model (car (find-best-models tokenSize :smartness smartness :temp temp :functions functions :chat chat))))
    (values (car model)
            (cdr model))))
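
;; Illustrative sketch: with the *MODELS* table below, this picks the cheapest
;; chat model with function calling, at least smartness 6, and room for 3000
;; tokens (i.e. "gpt-3.5-turbo-0613"), returning its parameter alist plus
;; #'OPENAI-CHAT as the function to call it with.
;;
;;   (find-best-model 3000 :smartness 6 :functions t :chat t)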

(defun create-openai-chat-model (model &key
                                         (temperature 0)
                                         (top_p 1)
                                         (n 1)
                                         stop
                                         max_tokens
                                         (presence_penalty 0)
                                         (frequency_penalty 0)
                                         logit_bias)
  "Build the request-parameter alist for an OpenAI chat call, dropping any
parameter whose value is NIL."
  (let* ((params `(("model" . ,model)
                   ("temperature" . ,temperature)
                   ("top_p" . ,top_p)
                   ("n" . ,n)
                   ("stop" . ,stop)
                   ("max_tokens" . ,max_tokens)
                   ("presence_penalty" . ,presence_penalty)
                   ("frequency_penalty" . ,frequency_penalty)
                   ("logit_bias" . ,logit_bias)))
         (non-null-params (remove-if (lambda (param) (null (cdr param))) params)))
    non-null-params))
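
;; Illustrative sketch: parameters whose value is NIL are dropped, so only the
;; supplied arguments and the non-NIL defaults end up in the request alist.
;;
;;   (create-openai-chat-model "gpt-3.5-turbo" :max_tokens 256)
;;   ;; => (("model" . "gpt-3.5-turbo") ("temperature" . 0) ("top_p" . 1) ("n" . 1)
;;   ;;     ("max_tokens" . 256) ("presence_penalty" . 0) ("frequency_penalty" . 0))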

(defun is-chat (model)
  "Return T and #'OPENAI-CHAT when MODEL names a chat model in *MODELS*,
otherwise NIL and #'OPENAI-COMPLETE."
  (loop for i in synergy::*models*
        do (when (and (string-equal model (cdr (assoc :modelname i)))
                      (cdr (assoc :chat i)))
             (return-from is-chat (values t #'openai-chat))))
  (values nil #'openai-complete))

(defun get-model (name)
  "Return the parameter alist for the model named NAME and the function used
to call it."
  (loop for i in *models*
        when (string-equal (cdr (assoc :modelName i)) name)
          do (return (values (cdr (assoc :model i))
                             (if (cdr (assoc :chat i))
                                 #'openai-chat
                                 #'openai-complete)))))
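
;; Illustrative lookups against the *MODELS* table below (both key on :modelName):
;;
;;   (is-chat "gpt-4")     ; => T, #'OPENAI-CHAT
;;   (get-model "davinci") ; => ((:temperature . 0) (:model . "davinci") (:max--tokens . 2049)), #'OPENAI-COMPLETE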

(defparameter *models*
  '(((:modelName . "gpt-4") (:chat . t) (:max--tokens . 8192) (:smartness . 7) (:costInput . 0.03) (:costOutput . 0.06) (:model . ((:temperature . 0) (:model . "gpt-4") (:max--tokens . 8192))))
    ((:modelName . "gpt-4-0613") (:chat . t) (:functions . t) (:max--tokens . 8192) (:smartness . 7) (:costInput . 0.03) (:costOutput . 0.06) (:model . ((:temperature . 0) (:model . "gpt-4-0613") (:max--tokens . 8192))))
    ((:modelName . "gpt-3.5-turbo") (:chat . t) (:max--tokens . 4096) (:smartness . 6) (:costInput . 0.0015) (:costOutput . 0.002) (:model . ((:temperature . 0) (:model . "gpt-3.5-turbo") (:max--tokens . 4096))))
    ((:modelName . "gpt-3.5-turbo-16k") (:chat . t) (:max--tokens . 16384) (:smartness . 6) (:costInput . 0.003) (:costOutput . 0.004) (:model . ((:temperature . 0) (:model . "gpt-3.5-turbo-16k") (:max--tokens . 16384))))
    ((:modelName . "gpt-3.5-turbo-0613") (:chat . t) (:functions . t) (:max--tokens . 4096) (:smartness . 6) (:costInput . 0.0015) (:costOutput . 0.002) (:model . ((:temperature . 0) (:model . "gpt-3.5-turbo-0613") (:max--tokens . 4096))))
    ((:modelName . "gpt-3.5-turbo-16k-0613") (:chat . t) (:functions . t) (:max--tokens . 16384) (:smartness . 6) (:costInput . 0.003) (:costOutput . 0.004) (:model . ((:temperature . 0) (:model . "gpt-3.5-turbo-16k-0613") (:max--tokens . 16384))))
    ((:modelName . "ada") (:max--tokens . 2049) (:smartness . 3) (:costInput . 0.0004) (:costOutput . 0.0016) (:model . ((:temperature . 0) (:model . "ada") (:max--tokens . 2049))))
    ((:modelName . "babbage") (:max--tokens . 2049) (:smartness . 4) (:costInput . 0.0006) (:costOutput . 0.0024) (:model . ((:temperature . 0) (:model . "babbage") (:max--tokens . 2049))))
    ((:modelName . "curie") (:max--tokens . 2049) (:smartness . 5) (:costInput . 0.003) (:costOutput . 0.012) (:model . ((:temperature . 0) (:model . "curie") (:max--tokens . 2049))))
    ((:modelName . "davinci") (:max--tokens . 2049) (:smartness . 6) (:costInput . 0.03) (:costOutput . 0.12) (:model . ((:temperature . 0) (:model . "davinci") (:max--tokens . 2049))))
    ((:modelName . "text-curie-001") (:max--tokens . 2049) (:smartness . 5) (:costInput . 0.003) (:costOutput . 0.012) (:model . ((:temperature . 0) (:model . "text-curie-001") (:max--tokens . 2049))))
    ((:modelName . "text-babbage-001") (:max--tokens . 2049) (:smartness . 4) (:costInput . 0.0006) (:costOutput . 0.0024) (:model . ((:temperature . 0) (:model . "text-babbage-001") (:max--tokens . 2049))))
    ((:modelName . "text-ada-001") (:max--tokens . 2049) (:smartness . 3) (:costInput . 0.0004) (:costOutput . 0.0016) (:model . ((:temperature . 0) (:model . "text-ada-001") (:max--tokens . 2049))))
    ((:modelName . "davinci") (:max--tokens . 2049) (:smartness . 6) (:costInput . 0.03) (:costOutput . 0.12) (:model . ((:temperature . 0) (:model . "davinci") (:max--tokens . 2049))))
    ((:modelName . "curie") (:max--tokens . 2049) (:smartness . 5) (:costInput . 0.003) (:costOutput . 0.012) (:model . ((:temperature . 0) (:model . "curie") (:max--tokens . 2049))))
    ((:modelName . "babbage") (:max--tokens . 2049) (:smartness . 4) (:costInput . 0.0006) (:costOutput . 0.0024) (:model . ((:temperature . 0) (:model . "babbage") (:max--tokens . 2049))))
    ((:modelName . "ada") (:max--tokens . 2049) (:smartness . 3) (:costInput . 0.0004) (:costOutput . 0.0016) (:model . ((:temperature . 0) (:model . "text-ada-001") (:max--tokens . 2049)))))
  "Catalog of available models: name, capabilities, context size, relative smartness,
input and output cost (used for ranking by FIND-BEST-MODELS), and the default
request parameters.")