Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix specs and examples #211

Merged
merged 8 commits into from
Dec 13, 2024
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@
Elixir LangChain enables Elixir applications to integrate AI services and self-hosted models into an application.

Currently supported AI services:

- OpenAI ChatGPT
- OpenAI DALL-e 2 - image generation
- Anthropic Claude
- Google AI - https://generativelanguage.googleapis.com
- Google AI - <https://generativelanguage.googleapis.com>
vkryukov marked this conversation as resolved.
Show resolved Hide resolved
- Google Vertex AI - Gemini
- Ollama
- Mistral
Expand Down Expand Up @@ -114,6 +115,7 @@ fly secrets set ANTHROPIC_API_KEY=MyAnthropicApiKey
```

A list of models to use:

- [Anthropic Claude models](https://docs.anthropic.com/en/docs/about-claude/models)
- [OpenAI models](https://platform.openai.com/docs/models)
- [Gemini AI models](https://ai.google.dev/gemini-api/docs/models/gemini)
Expand All @@ -139,6 +141,7 @@ alias LangChain.Function
alias LangChain.Message
alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOpenAI
alias LangChain.Utils.ChainResult

# map of data we want to be passed as `context` to the function when
# executed.
Expand Down Expand Up @@ -171,7 +174,7 @@ custom_fn =
})

# create and run the chain
{:ok, updated_chain, %Message{} = message} =
{:ok, updated_chain} =
LLMChain.new!(%{
llm: ChatOpenAI.new!(),
custom_context: custom_context,
Expand All @@ -182,8 +185,8 @@ custom_fn =
|> LLMChain.run(mode: :while_needs_response)

# print the LLM's answer
IO.puts(message.content)
#=> "The hairbrush is located in the drawer."
IO.puts(updated_chain |> ChainResult.to_string!())
# => "The hairbrush is located in the drawer."
vkryukov marked this conversation as resolved.
Show resolved Hide resolved
```

### Alternative OpenAI compatible APIs
Expand All @@ -193,7 +196,7 @@ There are several services or self-hosted applications that provide an OpenAI co
For example, if a locally running service provided that feature, the following code could connect to the service:

```elixir
{:ok, updated_chain, %Message{} = message} =
{:ok, updated_chain} =
LLMChain.new!(%{
llm: ChatOpenAI.new!(%{endpoint: "http://localhost:1234/v1/chat/completions"}),
})
Expand Down Expand Up @@ -243,4 +246,3 @@ Executing a specific test, whether it is a `live_call` or not, will execute it c
When doing local development on the `LangChain` library itself, rename the `.envrc_template` to `.envrc` and populate it with your private API values. This is only used when running live test when explicitly requested.

Use a tool like [Direnv](https://direnv.net/) or [Dotenv](https://github.com/motdotla/dotenv) to load the API values into the ENV when using the library locally.

2 changes: 1 addition & 1 deletion lib/chains/llm_chain.ex
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ defmodule LangChain.Chains.LLMChain do
Run the chain on the LLM using messages and any registered functions. This
formats the request for a ChatLLMChain where messages are passed to the API.

When successful, it returns `{:ok, updated_chain, message_or_messages}`
When successful, it returns `{:ok, updated_chain}`

## Options

Expand Down
2 changes: 1 addition & 1 deletion lib/chains/routing_chain.ex
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ defmodule LangChain.Chains.RoutingChain do
route.
"""
@spec run(t(), Keyword.t()) ::
{:ok, LLMChain.t(), Message.t() | [Message.t()]} | {:error, LangChainError.t()}
{:ok, LLMChain.t()} | {:error, LLMChain.t(), LangChainError.t()}
def run(%RoutingChain{} = chain, opts \\ []) do
default_name = chain.default_route.name

Expand Down
15 changes: 9 additions & 6 deletions lib/chat_models/chat_anthropic.ex
Original file line number Diff line number Diff line change
Expand Up @@ -334,8 +334,8 @@ defmodule LangChain.ChatModels.ChatAnthropic do
#
# Retries the request up to 3 times on transient errors with a 1 second delay
@doc false
@spec do_api_request(t(), [Message.t()], ChatModel.tools(), (any() -> any())) ::
list() | struct() | {:error, LangChainError.t()}
@spec do_api_request(t(), [Message.t()], ChatModel.tools(), non_neg_integer()) ::
list() | struct() | {:error, LangChainError.t()} | no_return()
def do_api_request(anthropic, messages, tools, retry_count \\ 3)

def do_api_request(_anthropic, _messages, _functions, 0) do
Expand Down Expand Up @@ -435,13 +435,14 @@ defmodule LangChain.ChatModels.ChatAnthropic do

data

# The error tuple was successfully received from the API. Unwrap it and
# return it as an error.
# The error tuple was successfully received from the API. Unwrap it and
# return it as an error.
{:ok, {:error, %LangChainError{} = error}} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error, LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
Expand Down Expand Up @@ -623,7 +624,9 @@ defmodule LangChain.ChatModels.ChatAnthropic do
LangChainError.exception(type: "invalid_json", message: error_message, original: response)}
end

def do_process_response(%ChatAnthropic{bedrock: %BedrockConfig{}}, %{"message" => "Too many requests" <> _rest = message}) do
def do_process_response(%ChatAnthropic{bedrock: %BedrockConfig{}}, %{
"message" => "Too many requests" <> _rest = message
}) do
# the error isn't wrapped in an error JSON object. tsk, tsk
{:error, LangChainError.exception(type: "too_many_requests", message: message)}
end
Expand Down
4 changes: 2 additions & 2 deletions lib/langchain_error.ex
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ defmodule LangChain.LangChainError do
Create the exception using either a message or a changeset whose errors are
converted to a message.
"""
@spec exception(message :: String.t() | Ecto.Changeset.t()) :: t() | no_return()
@spec exception(message :: String.t() | Ecto.Changeset.t() | keyword()) :: t() | no_return()
vkryukov marked this conversation as resolved.
Show resolved Hide resolved
def exception(message) when is_binary(message) do
%LangChainError{message: message}
end
Expand All @@ -44,7 +44,7 @@ defmodule LangChain.LangChainError do
%LangChainError{
message: Keyword.fetch!(opts, :message),
type: Keyword.get(opts, :type),
original: Keyword.get(opts, :original),
original: Keyword.get(opts, :original)
}
end
end
2 changes: 1 addition & 1 deletion lib/prompt_template.ex
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,7 @@ defmodule LangChain.PromptTemplate do
content. Raises an exception if invalid.
"""
@spec to_content_part!(t(), input :: %{atom() => any()}) ::
{:ok, Message.t()} | {:error, Ecto.Changeset.t()}
ContentPart.t() | no_return()
def to_content_part!(%PromptTemplate{} = template, %{} = inputs \\ %{}) do
content = PromptTemplate.format(template, inputs)
ContentPart.new!(%{type: :text, content: content})
Expand Down
3 changes: 2 additions & 1 deletion lib/utils/chain_result.ex
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ defmodule LangChain.Utils.ChainResult do
@doc """
Write the result to the given map as the value of the given key.
"""
@spec to_map(LLMChain.t(), map(), any()) :: {:ok, map()} | {:error, String.t()}
@spec to_map(LLMChain.t(), map(), any()) ::
{:ok, map()} | {:error, LLMChain.t(), LangChainError.t()}
def to_map(%LLMChain{} = chain, map, key) do
case ChainResult.to_string(chain) do
{:ok, value} ->
Expand Down
2 changes: 1 addition & 1 deletion lib/utils/chat_templates.ex
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ defmodule LangChain.Utils.ChatTemplates do
- Alternates message roles between: user, assistant, user, assistant, etc.
"""
@spec prep_and_validate_messages([Message.t()]) ::
{Message.t(), Message.t(), [Message.t()]} | no_return()
{Message.t() | nil, Message.t(), [Message.t()]} | no_return()
def prep_and_validate_messages(messages) do
{system, first_user, rest} =
case messages do
Expand Down