diff --git a/CHANGELOG.md b/CHANGELOG.md
index c51c070d..94519551 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Fixed
 
+## [0.62.0]
+
+### Added
+- Added new Claude 3.5 Haiku models (`claude-3-5-haiku-latest`, `claude-3-5-haiku-20241022`) and updated the `claudeh` alias to point to `claude-3-5-haiku-latest`.
+- Added support for XAI's Grok 2 beta model (`grok-beta`) with a new `grok` alias. Set the environment variable `XAI_API_KEY` to use it.
+
 ## [0.61.0]
 
 ### Added
diff --git a/Project.toml b/Project.toml
index 83aac027..096f7d15 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "PromptingTools"
 uuid = "670122d1-24a8-4d70-bfce-740807c42192"
 authors = ["J S @svilupp and contributors"]
-version = "0.61.0"
+version = "0.62.0"
 
 [deps]
 AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
diff --git a/src/llm_interface.jl b/src/llm_interface.jl
index 6bd62553..7e034b07 100644
--- a/src/llm_interface.jl
+++ b/src/llm_interface.jl
@@ -262,6 +262,18 @@ Requires one environment variable to be set:
 """
 struct SambaNovaOpenAISchema <: AbstractOpenAISchema end
 
+"""
+    XAIOpenAISchema
+
+Schema to call the XAI API. It follows OpenAI API conventions.
+
+Get your API key from [here](https://console.x.ai/).
+
+Requires one environment variable to be set:
+- `XAI_API_KEY`: Your API key
+"""
+struct XAIOpenAISchema <: AbstractOpenAISchema end
+
 abstract type AbstractOllamaSchema <: AbstractPromptSchema end
 
 """
diff --git a/src/llm_openai_schema_defs.jl b/src/llm_openai_schema_defs.jl
index 61ddd664..5fbae52c 100644
--- a/src/llm_openai_schema_defs.jl
+++ b/src/llm_openai_schema_defs.jl
@@ -205,6 +205,15 @@ function OpenAI.create_chat(schema::SambaNovaOpenAISchema,
     api_key = isempty(SAMBANOVA_API_KEY) ? api_key : SAMBANOVA_API_KEY
     OpenAI.create_chat(CustomOpenAISchema(), api_key, model, conversation; url, kwargs...)
 end
+function OpenAI.create_chat(schema::XAIOpenAISchema,
+        api_key::AbstractString,
+        model::AbstractString,
+        conversation;
+        url::String = "https://api.x.ai/v1",
+        kwargs...)
+    api_key = isempty(XAI_API_KEY) ? api_key : XAI_API_KEY
+    OpenAI.create_chat(CustomOpenAISchema(), api_key, model, conversation; url, kwargs...)
+end
 function OpenAI.create_chat(schema::DatabricksOpenAISchema,
         api_key::AbstractString,
         model::AbstractString,
@@ -364,6 +373,17 @@ function OpenAI.create_embeddings(schema::FireworksOpenAISchema,
         base_url = url)
     OpenAI.create_embeddings(provider, docs, model; kwargs...)
 end
+function OpenAI.create_embeddings(schema::XAIOpenAISchema,
+        api_key::AbstractString,
+        docs,
+        model::AbstractString;
+        url::String = "https://api.x.ai/v1",
+        kwargs...)
+    provider = CustomProvider(;
+        api_key = isempty(XAI_API_KEY) ? api_key : XAI_API_KEY,
+        base_url = url)
+    OpenAI.create_embeddings(provider, docs, model; kwargs...)
+end
 function OpenAI.create_embeddings(schema::AzureOpenAISchema,
         api_key::AbstractString,
         docs,
diff --git a/src/user_preferences.jl b/src/user_preferences.jl
index 960c7a13..cc29819b 100644
--- a/src/user_preferences.jl
+++ b/src/user_preferences.jl
@@ -26,6 +26,7 @@ Check your preferences by calling `get_preferences(key::String)`.
 - `OPENROUTER_API_KEY`: The API key for the OpenRouter API. Get yours from [here](https://openrouter.ai/keys).
 - `CEREBRAS_API_KEY`: The API key for the Cerebras API. Get yours from [here](https://cloud.cerebras.ai/).
 - `SAMBANOVA_API_KEY`: The API key for the Sambanova API. Get yours from [here](https://cloud.sambanova.ai/apis).
+- `XAI_API_KEY`: The API key for the XAI API. Get your key from [here](https://console.x.ai/).
 - `MODEL_CHAT`: The default model to use for aigenerate and most ai* calls. See `MODEL_REGISTRY` for a list of available models or define your own.
 - `MODEL_EMBEDDING`: The default model to use for aiembed (embedding documents). See `MODEL_REGISTRY` for a list of available models or define your own.
 - `PROMPT_SCHEMA`: The default prompt schema to use for aigenerate and most ai* calls (if not specified in `MODEL_REGISTRY`). Set as a string, eg, `"OpenAISchema"`.
@@ -60,6 +61,7 @@ Define your `register_model!()` calls in your `startup.jl` file to make them ava
 - `CEREBRAS_API_KEY`: The API key for the Cerebras API.
 - `SAMBANOVA_API_KEY`: The API key for the Sambanova API.
 - `LOG_DIR`: The directory to save the logs to, eg, when using `SaverSchema <: AbstractTracerSchema`. Defaults to `joinpath(pwd(), "log")`. Refer to `?SaverSchema` for more information on how it works and examples.
+- `XAI_API_KEY`: The API key for the XAI API. Get your key from [here](https://console.x.ai/).
 
 Preferences.jl takes priority over ENV variables, so if you set a preference, it will take precedence over the ENV variable.
 
@@ -84,6 +86,7 @@ const ALLOWED_PREFERENCES = ["MISTRALAI_API_KEY",
     "OPENROUTER_API_KEY", # Added OPENROUTER_API_KEY
     "CEREBRAS_API_KEY",
    "SAMBANOVA_API_KEY",
+    "XAI_API_KEY", # Added XAI_API_KEY
     "MODEL_CHAT",
     "MODEL_EMBEDDING",
     "MODEL_ALIASES",
@@ -169,6 +172,7 @@ global CEREBRAS_API_KEY::String = ""
 global SAMBANOVA_API_KEY::String = ""
 global LOCAL_SERVER::String = ""
 global LOG_DIR::String = ""
+global XAI_API_KEY::String = ""
 
 # Load them on init
 "Loads API keys from environment variables and preferences"
@@ -236,6 +240,9 @@ function load_api_keys!()
     global LOG_DIR
     LOG_DIR = @load_preference("LOG_DIR",
         default=get(ENV, "LOG_DIR", joinpath(pwd(), "log")))
+    global XAI_API_KEY
+    XAI_API_KEY = @load_preference("XAI_API_KEY",
+        default=get(ENV, "XAI_API_KEY", ""))
 
     return nothing
 end
@@ -415,7 +422,7 @@ aliases = merge(
     "claude" => "claude-3-5-sonnet-latest",
     "claudeo" => "claude-3-opus-20240229",
     "claudes" => "claude-3-5-sonnet-latest",
-    "claudeh" => "claude-3-haiku-20240307",
+    "claudeh" => "claude-3-5-haiku-latest",
     ## Groq
     "gllama3" => "llama-3.1-8b-instant",
     "gl3" => "llama-3.1-8b-instant",
@@ -454,6 +461,8 @@ aliases = merge(
     "sls" => "Meta-Llama-3.1-8B-Instruct", # s for small
     "slm" => "Meta-Llama-3.1-70B-Instruct", # m for medium
     "sll" => "Meta-Llama-3.1-405B-Instruct", # l for large
+    ## XAI's Grok
+    "grok" => "grok-beta",
     ## DeepSeek
     "dschat" => "deepseek-chat",
     "dscode" => "deepseek-coder",
@@ -888,11 +897,16 @@ registry = Dict{String, ModelSpec}(
         3e-6,
         1.5e-5,
         "Anthropic's middle model Claude 3 Sonnet. Max output 4096 tokens, 200K context. See details [here](https://docs.anthropic.com/claude/docs/models-overview)"),
-    # "claude-3-5-haiku-latest" => ModelSpec("claude-3-5-haiku-latest",
-    #     AnthropicSchema(),
-    #     2.5e-7,
-    #     1.25e-6,
-    #     "Anthropic's smallest and faster model Claude 3 Haiku. Latest version, 200K context. See details [here](https://docs.anthropic.com/claude/docs/models-overview)"),
+    "claude-3-5-haiku-latest" => ModelSpec("claude-3-5-haiku-latest",
+        AnthropicSchema(),
+        1e-6,
+        5e-6,
+        "Anthropic's smallest and fastest model, Claude 3.5 Haiku. Latest version, 200K context. See details [here](https://docs.anthropic.com/claude/docs/models-overview)"),
+    "claude-3-5-haiku-20241022" => ModelSpec("claude-3-5-haiku-20241022",
+        AnthropicSchema(),
+        1e-6,
+        5e-6,
+        "Anthropic's smallest and fastest model, Claude 3.5 Haiku. Version 2024-10-22, 200K context. See details [here](https://docs.anthropic.com/claude/docs/models-overview)"),
     "claude-3-haiku-20240307" => ModelSpec("claude-3-haiku-20240307",
         AnthropicSchema(),
         2.5e-7,
@@ -1058,7 +1072,12 @@ registry = Dict{String, ModelSpec}(
         SambaNovaOpenAISchema(),
         5e-6,
         1e-7,
-        "Meta's Llama3.1 405b, hosted by SambaNova.ai. Max 64K context.")
+        "Meta's Llama3.1 405b, hosted by SambaNova.ai. Max 64K context."),
+    "grok-beta" => ModelSpec("grok-beta",
+        XAIOpenAISchema(),
+        5e-6,
+        15e-6,
+        "XAI's Grok 2 beta model. Max 128K context.")
 )
 
 """
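
A minimal usage sketch for the additions above, assuming PromptingTools 0.62.0 with `XAI_API_KEY` (and, for the Haiku alias, `ANTHROPIC_API_KEY`) set in the environment; it uses the package's standard `aigenerate` entry point, and the prompt text is illustrative only.

using PromptingTools

# "grok" resolves to "grok-beta" via the new XAIOpenAISchema; the key is picked
# up from ENV["XAI_API_KEY"] by load_api_keys!() as wired in user_preferences.jl.
msg = aigenerate("Summarize the Julia language in one sentence."; model = "grok")

# "claudeh" now points to "claude-3-5-haiku-latest" (Anthropic key required).
msg_haiku = aigenerate("Summarize the Julia language in one sentence."; model = "claudeh")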