Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
improved error handling
Browse files Browse the repository at this point in the history
- handle Anthropic "overload_error"
- return {:error, %LangChainError{}} instead of an {:error, String.t()}
- updated lots of chat models and chains
- LLMChain.run returns an error struct
- LangChainError.type is a string code that is easier to detect and have coded handling for more scenarios
brainlid committed Nov 21, 2024
1 parent 887e025 commit d5d9068
Showing 23 changed files with 473 additions and 246 deletions.
7 changes: 4 additions & 3 deletions lib/chains/data_extraction_chain.ex
Original file line number Diff line number Diff line change
@@ -73,6 +73,7 @@ defmodule LangChain.Chains.DataExtractionChain do
alias LangChain.PromptTemplate
alias LangChain.Message
alias LangChain.Message.ToolCall
alias LangChain.LangChainError
alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOpenAI

@@ -86,7 +87,7 @@ Passage:
Run the data extraction chain.
"""
@spec run(ChatOpenAI.t(), json_schema :: map(), prompt :: [any()], opts :: Keyword.t()) ::
{:ok, result :: [any()]} | {:error, String.t()}
{:ok, result :: [any()]} | {:error, LangChainError.t()}
def run(llm, json_schema, prompt, opts \\ []) do
verbose = Keyword.get(opts, :verbose, false)

@@ -123,15 +124,15 @@ Passage:
{:ok, info}

other ->
{:error, "Unexpected response. #{inspect(other)}"}
{:error, LangChainError.exception("Unexpected response. #{inspect(other)}")}
end
rescue
exception ->
Logger.warning(
"Caught unexpected exception in DataExtractionChain. Error: #{inspect(exception)}"
)

{:error, "Unexpected error in DataExtractionChain. Check logs for details."}
{:error, LangChainError.exception("Unexpected error in DataExtractionChain. Check logs for details.")}
end
end

30 changes: 22 additions & 8 deletions lib/chains/llm_chain.ex
Original file line number Diff line number Diff line change
@@ -236,7 +236,7 @@ defmodule LangChain.Chains.LLMChain do
an opportunity to use the `ToolResult` information in an assistant response
message. In essence, this mode always gives the LLM the last word.
"""
@spec run(t(), Keyword.t()) :: {:ok, t()} | {:error, t(), String.t()}
@spec run(t(), Keyword.t()) :: {:ok, t()} | {:error, t(), LangChainError.t()}
def run(chain, opts \\ [])

def run(%LLMChain{} = chain, opts) do
@@ -275,12 +275,16 @@ defmodule LangChain.Chains.LLMChain do
# Repeatedly run the chain until we get a successful ToolResponse or processed
# assistant message. Once we've reached success, it is not submitted back to the LLM,
# the process ends there.
@spec run_until_success(t()) :: {:ok, t()} | {:error, t(), String.t()}
@spec run_until_success(t()) :: {:ok, t()} | {:error, t(), LangChainError.t()}
defp run_until_success(%LLMChain{last_message: %Message{} = last_message} = chain) do
stop_or_recurse =
cond do
chain.current_failure_count >= chain.max_retry_count ->
{:error, chain, "Exceeded max failure count"}
{:error, chain,
LangChainError.exception(
type: "exceeded_failure_count",
message: "Exceeded max failure count"
)}

last_message.role == :tool && !Message.tool_had_errors?(last_message) ->
# a successful tool result has no errors
@@ -318,7 +322,7 @@ defmodule LangChain.Chains.LLMChain do
# Repeatedly run the chain while `needs_response` is true. This will execute
# tools and re-submit the tool result to the LLM giving the LLM an
# opportunity to execute more tools or return a response.
@spec run_while_needs_response(t()) :: {:ok, t()} | {:error, t(), String.t()}
@spec run_while_needs_response(t()) :: {:ok, t()} | {:error, t(), LangChainError.t()}
defp run_while_needs_response(%LLMChain{needs_response: false} = chain) do
{:ok, chain}
end
@@ -337,11 +341,16 @@ defmodule LangChain.Chains.LLMChain do
end

# internal reusable function for running the chain
@spec do_run(t()) :: {:ok, t()} | {:error, t(), String.t()}
@spec do_run(t()) :: {:ok, t()} | {:error, t(), LangChainError.t()}
defp do_run(%LLMChain{current_failure_count: current_count, max_retry_count: max} = chain)
when current_count >= max do
Callbacks.fire(chain.callbacks, :on_retries_exceeded, [chain])
{:error, chain, "Exceeded max failure count"}

{:error, chain,
LangChainError.exception(
type: "exceeded_failure_count",
message: "Exceeded max failure count"
)}
end

defp do_run(%LLMChain{} = chain) do
@@ -385,10 +394,15 @@ defmodule LangChain.Chains.LLMChain do

{:ok, updated_chain}

{:error, reason} ->
{:error, %LangChainError{} = reason} ->
if chain.verbose, do: IO.inspect(reason, label: "ERROR")
Logger.error("Error during chat call. Reason: #{inspect(reason)}")
{:error, chain, reason}

{:error, string_reason} when is_binary(string_reason) ->
if chain.verbose, do: IO.inspect(string_reason, label: "ERROR")
Logger.error("Error during chat call. Reason: #{inspect(string_reason)}")
{:error, chain, LangChainError.exception(message: string_reason)}
end
end

@@ -736,7 +750,7 @@ defmodule LangChain.Chains.LLMChain do

case Function.execute(function, call.arguments, context) do
{:ok, llm_result, processed_result} ->
if verbose, do: IO.inspect(llm_result, label: "FUNCTION RESULT")
if verbose, do: IO.inspect(processed_result, label: "FUNCTION PROCESSED RESULT")
# successful execution and storage of processed_content.
ToolResult.new!(%{
tool_call_id: call.call_id,
2 changes: 1 addition & 1 deletion lib/chains/routing_chain.ex
Original file line number Diff line number Diff line change
@@ -106,7 +106,7 @@ defmodule LangChain.Chains.RoutingChain do
route.
"""
@spec run(t(), Keyword.t()) ::
{:ok, LLMChain.t(), Message.t() | [Message.t()]} | {:error, String.t()}
{:ok, LLMChain.t(), Message.t() | [Message.t()]} | {:error, LangChainError.t()}
def run(%RoutingChain{} = chain, opts \\ []) do
default_name = chain.default_route.name

2 changes: 1 addition & 1 deletion lib/chains/text_to_title_chain.ex
Original file line number Diff line number Diff line change
@@ -124,7 +124,7 @@ defmodule LangChain.Chains.TextToTitleChain do
|> TextToTitleChain.run()
"""
@spec run(t(), Keyword.t()) :: {:ok, LLMChain.t()} | {:error, LLMChain.t(), String.t()}
@spec run(t(), Keyword.t()) :: {:ok, LLMChain.t()} | {:error, LLMChain.t(), LangChainError.t()}
def run(%TextToTitleChain{} = chain, opts \\ []) do
messages =
[
90 changes: 57 additions & 33 deletions lib/chat_models/chat_anthropic.ex
Original file line number Diff line number Diff line change
@@ -237,15 +237,18 @@ defmodule LangChain.ChatModels.ChatAnthropic do
|> Map.drop([:model, :stream])
end

defp get_tool_choice(%ChatAnthropic{tool_choice: %{"type" => "tool", "name" => name}=_tool_choice}) when is_binary(name) and byte_size(name) > 0,
do: %{"type" => "tool", "name" => name}
defp get_tool_choice(%ChatAnthropic{
tool_choice: %{"type" => "tool", "name" => name} = _tool_choice
})
when is_binary(name) and byte_size(name) > 0,
do: %{"type" => "tool", "name" => name}

defp get_tool_choice(%ChatAnthropic{tool_choice: %{"type" => type}=_tool_choice}) when is_binary(type) and byte_size(type) > 0,
do: %{"type" => type}
defp get_tool_choice(%ChatAnthropic{tool_choice: %{"type" => type} = _tool_choice})
when is_binary(type) and byte_size(type) > 0,
do: %{"type" => type}

defp get_tool_choice(%ChatAnthropic{}), do: nil


defp get_tools_for_api(nil), do: []

defp get_tools_for_api(tools) do
@@ -288,15 +291,15 @@ defmodule LangChain.ChatModels.ChatAnthropic do
try do
# make base api request and perform high-level success/failure checks
case do_api_request(anthropic, messages, functions) do
{:error, reason} ->
{:error, reason}
{:error, %LangChainError{} = error} ->
{:error, error}

parsed_data ->
{:ok, parsed_data}
end
rescue
err in LangChainError ->
{:error, err.message}
{:error, err}
end
end

@@ -305,18 +308,20 @@ defmodule LangChain.ChatModels.ChatAnthropic do
# The result of the function is:
#
# - `result` - where `result` is a data-structure like a list or map.
# - `{:error, reason}` - Where reason is a string explanation of what went wrong.
# - `{:error, %LangChainError{} = reason}` - An `LangChain.LangChainError` exception with an explanation of what went wrong.
#
# If `stream: false`, the completed message is returned.
#
# Retries the request up to 3 times on transient errors with a 1 second delay
@doc false
@spec do_api_request(t(), [Message.t()], ChatModel.tools(), (any() -> any())) ::
list() | struct() | {:error, String.t()}
list() | struct() | {:error, LangChainError.t()}
def do_api_request(anthropic, messages, tools, retry_count \\ 3)

def do_api_request(_anthropic, _messages, _functions, 0) do
raise LangChainError, "Retries exceeded. Connection failed."
raise LangChainError,
type: "retries_exceeded",
message: "Retries exceeded. Connection failed."
end

def do_api_request(
@@ -341,7 +346,7 @@ defmodule LangChain.ChatModels.ChatAnthropic do
|> Req.post()
# parse the body and return it as parsed structs
|> case do
{:ok, %Req.Response{body: data} = response} ->
{:ok, %Req.Response{status: 200, body: data} = response} ->
Callbacks.fire(anthropic.callbacks, :on_llm_ratelimit_info, [
anthropic,
get_ratelimit_info(response.headers)
@@ -361,17 +366,22 @@ defmodule LangChain.ChatModels.ChatAnthropic do
result
end

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:ok, %Req.Response{status: 529}} ->
{:error, LangChainError.exception(type: "overloaded", message: "Overloaded")}

{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
Logger.debug(fn -> "Mint connection closed: retry count = #{inspect(retry_count)}" end)
do_api_request(anthropic, messages, tools, retry_count - 1)

other ->
Logger.error("Unexpected and unhandled API response! #{inspect(other)}")
other
message = "Unexpected and unhandled API response! #{inspect(other)}"
Logger.error(message)
{:error, LangChainError.exception(type: "unexpected_response", message: message)}
end
end

@@ -405,23 +415,21 @@ defmodule LangChain.ChatModels.ChatAnthropic do

data

{:error, %LangChainError{message: reason}} ->
{:error, reason}
{:error, %LangChainError{} = error} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error, LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
Logger.debug(fn -> "Mint connection closed: retry count = #{inspect(retry_count)}" end)
do_api_request(anthropic, messages, tools, retry_count - 1)

other ->
Logger.error(
"Unhandled and unexpected response from streamed post call. #{inspect(other)}"
)

{:error, "Unexpected response"}
message = "Unhandled and unexpected response from streamed post call. #{inspect(other)}"
Logger.error(message)
{:error, LangChainError.exception(type: "unexpected_response", message: message)}
end
end

@@ -466,7 +474,7 @@ defmodule LangChain.ChatModels.ChatAnthropic do
| [Message.t()]
| MessageDelta.t()
| [MessageDelta.t()]
| {:error, String.t()}
| {:error, LangChainError.t()}
def do_process_response(_model, %{
"role" => "assistant",
"content" => contents,
@@ -572,30 +580,43 @@ defmodule LangChain.ChatModels.ChatAnthropic do
|> to_response()
end

def do_process_response(_model, %{"error" => %{"message" => reason}}) do
def do_process_response(_model, %{
"type" => "error",
"error" => %{"type" => type, "message" => reason}
}) do
Logger.error("Received error from API: #{inspect(reason)}")
{:error, reason}
{:error, LangChainError.exception(type: type, message: reason)}
end

def do_process_response(_model, %{"error" => %{"message" => reason} = error}) do
Logger.error("Received error from API: #{inspect(reason)}")
{:error, LangChainError.exception(type: error["type"], message: reason)}
end

def do_process_response(_model, {:error, %Jason.DecodeError{} = response}) do
error_message = "Received invalid JSON: #{inspect(response)}"
Logger.error(error_message)
{:error, error_message}

{:error,
LangChainError.exception(type: "invalid_json", message: error_message, original: response)}
end

def do_process_response(%ChatAnthropic{bedrock: %BedrockConfig{}}, %{"message" => message}) do
{:error, "Received error from API: #{message}"}
{:error, LangChainError.exception(message: "Received error from API: #{message}")}
end

def do_process_response(%ChatAnthropic{bedrock: %BedrockConfig{}}, %{
bedrock_exception: exceptions
}) do
{:error, "Stream exception received: #{inspect(exceptions)}"}
{:error,
LangChainError.exception(message: "Stream exception received: #{inspect(exceptions)}")}
end

def do_process_response(_model, other) do
Logger.error("Trying to process an unexpected response. #{inspect(other)}")
{:error, "Unexpected response"}

{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end

# for parsing a list of received content JSON objects
@@ -650,7 +671,9 @@ defmodule LangChain.ChatModels.ChatAnthropic do
end

defp to_response({:ok, message}), do: message
defp to_response({:error, changeset}), do: {:error, Utils.changeset_error_to_string(changeset)}

defp to_response({:error, %Ecto.Changeset{} = changeset}),
do: {:error, LangChainError.exception(changeset)}

defp stop_reason_to_status("end_turn"), do: :complete
defp stop_reason_to_status("tool_use"), do: :complete
@@ -711,6 +734,7 @@ defmodule LangChain.ChatModels.ChatAnthropic do
defp relevant_event?("event: content_block_delta\n" <> _rest), do: true
defp relevant_event?("event: content_block_start\n" <> _rest), do: true
defp relevant_event?("event: message_delta\n" <> _rest), do: true
defp relevant_event?("event: error\n" <> _rest), do: true
# ignoring
defp relevant_event?("event: message_start\n" <> _rest), do: false
defp relevant_event?("event: ping\n" <> _rest), do: false
14 changes: 7 additions & 7 deletions lib/chat_models/chat_bumblebee.ex
Original file line number Diff line number Diff line change
@@ -179,8 +179,8 @@ defmodule LangChain.ChatModels.ChatBumblebee do
{:ok, chain} ->
chain

{:error, changeset} ->
raise LangChainError, changeset
{:error, %Ecto.Changeset{} = changeset} ->
raise LangChainError.exception(changeset)
end
end

@@ -229,7 +229,7 @@ defmodule LangChain.ChatModels.ChatBumblebee do
end
rescue
err in LangChainError ->
{:error, err.message}
{:error, err}
end
end

@@ -259,10 +259,10 @@ defmodule LangChain.ChatModels.ChatBumblebee do
# return a list of the complete message. As a list for compatibility.
[message]

{:error, changeset} ->
{:error, %Ecto.Changeset{} = changeset} ->
reason = Utils.changeset_error_to_string(changeset)
Logger.error("Failed to create non-streamed full message: #{inspect(reason)}")
{:error, reason}
{:error, LangChainError.exception(changeset)}
end
end

@@ -296,14 +296,14 @@ defmodule LangChain.ChatModels.ChatBumblebee do
Callbacks.fire(model.callbacks, :on_llm_new_delta, [model, delta])
delta

{:error, changeset} ->
{:error, %Ecto.Changeset{} = changeset} ->
reason = Utils.changeset_error_to_string(changeset)

Logger.error(
"Failed to process received model's MessageDelta data: #{inspect(reason)}"
)

raise LangChainError, reason
raise LangChainError.exception(changeset)
end
end

57 changes: 34 additions & 23 deletions lib/chat_models/chat_google_ai.ex
Original file line number Diff line number Diff line change
@@ -319,7 +319,7 @@ defmodule LangChain.ChatModels.ChatGoogleAI do

@doc false
@spec do_api_request(t(), [Message.t()], [Function.t()]) ::
list() | struct() | {:error, String.t()}
list() | struct() | {:error, LangChainError.t()}
def do_api_request(%ChatGoogleAI{stream: false} = google_ai, messages, tools) do
req =
Req.new(
@@ -344,11 +344,16 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
result
end

{:ok, %Req.Response{status: status}} ->
{:error, "Failed with status: #{inspect(status)}"}
{:ok, %Req.Response{status: status} = err} ->
{:error,
LangChainError.exception(
message: "Failed with status: #{inspect(status)}",
original: err
)}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

other ->
Logger.error("Unexpected and unhandled API response! #{inspect(other)}")
@@ -378,21 +383,27 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
# this behavior by forcing the final delta to have `status: :complete`.
complete_final_delta(data)

{:ok, %Req.Response{status: status}} ->
{:error, "Failed with status: #{inspect(status)}"}
{:ok, %Req.Response{status: status} = err} ->
{:error,
LangChainError.exception(
message: "Failed with status: #{inspect(status)}",
original: err
)}

{:error, %LangChainError{message: reason}} ->
{:error, reason}
{:error, %LangChainError{} = error} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

other ->
Logger.error(
"Unhandled and unexpected response from streamed post call. #{inspect(other)}"
)

{:error, "Unexpected response"}
{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end
end

@@ -478,8 +489,8 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -517,8 +528,8 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -539,8 +550,8 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -574,25 +585,25 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

def do_process_response(_model, %{"error" => %{"message" => reason}}, _) do
Logger.error("Received error from API: #{inspect(reason)}")
{:error, reason}
{:error, LangChainError.exception(message: reason)}
end

def do_process_response(_model, {:error, %Jason.DecodeError{} = response}, _) do
error_message = "Received invalid JSON: #{inspect(response)}"
Logger.error(error_message)
{:error, error_message}
{:error, LangChainError.exception(type: "invalid_json", message: error_message, original: response)}
end

def do_process_response(_model, other, _) do
Logger.error("Trying to process an unexpected response. #{inspect(other)}")
{:error, "Unexpected response"}
{:error, LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end

@doc false
40 changes: 25 additions & 15 deletions lib/chat_models/chat_mistral_ai.ex
Original file line number Diff line number Diff line change
@@ -156,7 +156,7 @@ defmodule LangChain.ChatModels.ChatMistralAI do
end
rescue
err in LangChainError ->
{:error, err.message}
{:error, err}
end
end

@@ -165,7 +165,10 @@ defmodule LangChain.ChatModels.ChatMistralAI do
def do_api_request(mistral, messages, functions, retry_count \\ 3)

def do_api_request(_mistral, _messages, _functions, 0) do
raise LangChainError, "Retries exceeded. Connection failed."
raise LangChainError.exception(
type: "retries_exceeded",
message: "Retries exceeded. Connection failed."
)
end

def do_api_request(
@@ -199,8 +202,9 @@ defmodule LangChain.ChatModels.ChatMistralAI do
result
end

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
@@ -237,11 +241,12 @@ defmodule LangChain.ChatModels.ChatMistralAI do
{:ok, %Req.Response{body: data}} ->
data

{:error, %LangChainError{message: reason}} ->
{:error, reason}
{:error, %LangChainError{} = err} ->
{:error, err}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
@@ -253,7 +258,8 @@ defmodule LangChain.ChatModels.ChatMistralAI do
"Unhandled and unexpected response from streamed post call. #{inspect(other)}"
)

{:error, "Unexpected response"}
{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end
end

@@ -264,7 +270,7 @@ defmodule LangChain.ChatModels.ChatMistralAI do
| [Message.t()]
| MessageDelta.t()
| [MessageDelta.t()]
| {:error, String.t()}
| {:error, LangChainError.t()}
def do_process_response(model, %{"choices" => choices}) when is_list(choices) do
# process each response individually. Return a list of all processed choices
for choice <- choices do
@@ -344,25 +350,29 @@ defmodule LangChain.ChatModels.ChatMistralAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

def do_process_response(_model, %{"error" => %{"message" => reason}}) do
Logger.error("Received error from API: #{inspect(reason)}")
{:error, reason}
{:error, LangChainError.exception(message: reason)}
end

def do_process_response(_model, {:error, %Jason.DecodeError{} = response}) do
error_message = "Received invalid JSON: #{inspect(response)}"
Logger.error(error_message)
{:error, error_message}

{:error,
LangChainError.exception(type: "invalid_json", message: error_message, original: response)}
end

def do_process_response(_model, other) do
Logger.error("Trying to process an unexpected response. #{inspect(other)}")
{:error, "Unexpected response"}

{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end

@doc """
3 changes: 2 additions & 1 deletion lib/chat_models/chat_model.ex
Original file line number Diff line number Diff line change
@@ -3,10 +3,11 @@ defmodule LangChain.ChatModels.ChatModel do
alias LangChain.Message
alias LangChain.MessageDelta
alias LangChain.Function
alias LangChain.LangChainError
alias LangChain.Utils

@type call_response ::
{:ok, Message.t() | [Message.t()] | [MessageDelta.t()]} | {:error, String.t()}
{:ok, Message.t() | [Message.t()] | [MessageDelta.t()]} | {:error, LangChainError.t()}

@type tool :: Function.t()
@type tools :: [tool()]
26 changes: 13 additions & 13 deletions lib/chat_models/chat_ollama_ai.ex
Original file line number Diff line number Diff line change
@@ -265,22 +265,22 @@ defmodule LangChain.ChatModels.ChatOllamaAI do
# The result of the function is:
#
# - `result` - where `result` is a data-structure like a list or map.
# - `{:error, reason}` - Where reason is a string explanation of what went wrong.
# - `{:error, reason}` - Where reason is a `LangChain.LangChainError`
# explanation of what went wrong.
#
# **NOTE:** callback function are IGNORED for ollama ai
# When `stream: true` is
# **NOTE:** callback function are IGNORED for ollama ai When `stream: true` is
# If `stream: false`, the completed message is returned.
#
# If `stream: true`, the completed message is returned after MessageDelta's.
#
# Retries the request up to 3 times on transient errors with a 1 second delay
@doc false
@spec do_api_request(t(), [Message.t()], [Function.t()]) ::
list() | struct() | {:error, String.t()}
list() | struct() | {:error, LangChainError.t()}
def do_api_request(ollama_ai, messages, functions, retry_count \\ 3)

def do_api_request(_ollama_ai, _messages, _functions, 0) do
raise LangChainError, "Retries exceeded. Connection failed."
raise LangChainError.exception(type: "retries_exceeded", message: "Retries exceeded. Connection failed.")
end

def do_api_request(
@@ -348,11 +348,11 @@ defmodule LangChain.ChatModels.ChatOllamaAI do
{:ok, %Req.Response{body: data}} ->
data

{:error, %LangChainError{message: reason}} ->
{:error, reason}
{:error, %LangChainError{} = error} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error, LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
@@ -364,7 +364,7 @@ defmodule LangChain.ChatModels.ChatOllamaAI do
"Unhandled and unexpected response from streamed post call. #{inspect(other)}"
)

{:error, "Unexpected response"}
{:error, LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end
end

@@ -382,16 +382,16 @@ defmodule LangChain.ChatModels.ChatOllamaAI do

def do_process_response(_model, %{"error" => reason}) do
Logger.error("Received error from API: #{inspect(reason)}")
{:error, reason}
{:error, LangChainError.exception(message: reason)}
end

defp create_message(message, status, message_type) do
case message_type.new(Map.merge(message, %{"status" => status})) do
{:ok, new_message} ->
new_message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

49 changes: 26 additions & 23 deletions lib/chat_models/chat_open_ai.ex
Original file line number Diff line number Diff line change
@@ -493,7 +493,7 @@ defmodule LangChain.ChatModels.ChatOpenAI do
end
rescue
err in LangChainError ->
{:error, err.message}
{:error, err}
end
end

@@ -517,7 +517,7 @@ defmodule LangChain.ChatModels.ChatOpenAI do
# Retries the request up to 3 times on transient errors with a 1 second delay
@doc false
@spec do_api_request(t(), [Message.t()], ChatModel.tools(), integer()) ::
list() | struct() | {:error, String.t()}
list() | struct() | {:error, LangChainError.t()}
def do_api_request(openai, messages, tools, retry_count \\ 3)

def do_api_request(_openai, _messages, _tools, 0) do
@@ -564,16 +564,17 @@ defmodule LangChain.ChatModels.ChatOpenAI do
])

case do_process_response(openai, data) do
{:error, reason} ->
{:error, %LangChainError{} = reason} ->
{:error, reason}

result ->
Callbacks.fire(openai.callbacks, :on_llm_new_message, [openai, result])
result
end

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
@@ -617,11 +618,12 @@ defmodule LangChain.ChatModels.ChatOpenAI do

data

{:error, %LangChainError{message: reason}} ->
{:error, reason}
{:error, %LangChainError{} = error} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

{:error, %Req.TransportError{reason: :closed}} ->
# Force a retry by making a recursive call decrementing the counter
@@ -633,7 +635,8 @@ defmodule LangChain.ChatModels.ChatOpenAI do
"Unhandled and unexpected response from streamed post call. #{inspect(other)}"
)

{:error, "Unexpected response"}
{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end
end

@@ -751,8 +754,8 @@ defmodule LangChain.ChatModels.ChatOpenAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -792,8 +795,8 @@ defmodule LangChain.ChatModels.ChatOpenAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -811,10 +814,10 @@ defmodule LangChain.ChatModels.ChatOpenAI do
{:ok, %ToolCall{} = call} ->
call

{:error, changeset} ->
{:error, %Ecto.Changeset{} = changeset} ->
reason = Utils.changeset_error_to_string(changeset)
Logger.error("Failed to process ToolCall for a function. Reason: #{reason}")
{:error, reason}
{:error, LangChainError.exception(changeset)}
end
end

@@ -838,10 +841,10 @@ defmodule LangChain.ChatModels.ChatOpenAI do
{:ok, %ToolCall{} = call} ->
call

{:error, changeset} ->
{:error, %Ecto.Changeset{} = changeset} ->
reason = Utils.changeset_error_to_string(changeset)
Logger.error("Failed to process ToolCall for a function. Reason: #{reason}")
{:error, reason}
{:error, LangChainError.exception(changeset)}
end
end

@@ -856,25 +859,25 @@ defmodule LangChain.ChatModels.ChatOpenAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

def do_process_response(_model, %{"error" => %{"message" => reason}}) do
Logger.error("Received error from API: #{inspect(reason)}")
{:error, reason}
{:error, LangChainError.exception(message: reason)}
end

def do_process_response(_model, {:error, %Jason.DecodeError{} = response}) do
error_message = "Received invalid JSON: #{inspect(response)}"
Logger.error(error_message)
{:error, error_message}
{:error, LangChainError.exception(type: "invalid_json", message: error_message, original: response)}
end

def do_process_response(_model, other) do
Logger.error("Trying to process an unexpected response. #{inspect(other)}")
{:error, "Unexpected response"}
{:error, LangChainError.exception(message: "Unexpected response")}
end

defp finish_reason_to_status(nil), do: :incomplete
45 changes: 26 additions & 19 deletions lib/chat_models/chat_vertex_ai.ex
Original file line number Diff line number Diff line change
@@ -289,13 +289,13 @@ defmodule LangChain.ChatModels.ChatVertexAI do
end
rescue
err in LangChainError ->
{:error, err.message}
{:error, err}
end
end

@doc false
@spec do_api_request(t(), [Message.t()], [Function.t()]) ::
list() | struct() | {:error, String.t()}
list() | struct() | {:error, LangChainError.t()}
def do_api_request(%ChatVertexAI{stream: false} = vertex_ai, messages, tools) do
req =
Req.new(
@@ -321,8 +321,9 @@ defmodule LangChain.ChatModels.ChatVertexAI do
result
end

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

other ->
Logger.error("Unexpected and unhandled API response! #{inspect(other)}")
@@ -353,18 +354,20 @@ defmodule LangChain.ChatModels.ChatVertexAI do
# this behavior by forcing the final delta to have `status: :complete`.
complete_final_delta(data)

{:error, %LangChainError{message: reason}} ->
{:error, reason}
{:error, %LangChainError{} = error} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout}} ->
{:error, "Request timed out"}
{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}

other ->
Logger.error(
"Unhandled and unexpected response from streamed post call. #{inspect(other)}"
)

{:error, "Unexpected response"}
{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end
end

@@ -428,8 +431,8 @@ defmodule LangChain.ChatModels.ChatVertexAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -468,8 +471,8 @@ defmodule LangChain.ChatModels.ChatVertexAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -486,8 +489,8 @@ defmodule LangChain.ChatModels.ChatVertexAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -530,8 +533,8 @@ defmodule LangChain.ChatModels.ChatVertexAI do
{:ok, message} ->
message

{:error, changeset} ->
{:error, Utils.changeset_error_to_string(changeset)}
{:error, %Ecto.Changeset{} = changeset} ->
{:error, LangChainError.exception(changeset)}
end
end

@@ -543,12 +546,16 @@ defmodule LangChain.ChatModels.ChatVertexAI do
def do_process_response({:error, %Jason.DecodeError{} = response}, _) do
error_message = "Received invalid JSON: #{inspect(response)}"
Logger.error(error_message)
{:error, error_message}

{:error,
LangChainError.exception(type: "invalid_json", message: error_message, original: response)}
end

def do_process_response(other, _) do
Logger.error("Trying to process an unexpected response. #{inspect(other)}")
{:error, "Unexpected response"}

{:error,
LangChainError.exception(type: "unexpected_response", message: "Unexpected response")}
end

@doc false
21 changes: 19 additions & 2 deletions lib/langchain_error.ex
Original file line number Diff line number Diff line change
@@ -9,13 +9,22 @@ defmodule LangChain.LangChainError do
raise LangChainError, "Message text"
raise LangChainError, type: "overloaded_error", message: "Message text"
The error struct contains the following keys:
- `:type` - A string code to make detecting and responding to specific errors easier. This may have values like "length" or "overloaded_error". The specific meaning of the type is dependent on the service or model.
- `:message` - A string representation or explanation of the error.
- `:original` - If an exception was caught and wrapped into a LangChainError, this may be the original exception that was encountered.
"""
import LangChain.Utils, only: [changeset_error_to_string: 1]
alias __MODULE__

@type t :: %LangChainError{}

defexception [:message]
defexception [:type, :message, :original]

@doc """
Create the exception using either a message or a changeset whose errors are
@@ -28,6 +37,14 @@ defmodule LangChain.LangChainError do

def exception(%Ecto.Changeset{} = changeset) do
text_reason = changeset_error_to_string(changeset)
%LangChainError{message: text_reason}
%LangChainError{type: "changeset", message: text_reason}
end

@doc """
Create the exception from a keyword list.

Supported keys:

  * `:message` (required) - a string explanation of the error. Raises
    `KeyError` if missing, since an error without a message is not useful.
  * `:type` - optional string code (e.g. `"overloaded_error"`) for easier
    programmatic handling of specific error scenarios.
  * `:original` - optional original exception that was caught and wrapped.
"""
def exception(opts) when is_list(opts) do
  %LangChainError{
    message: Keyword.fetch!(opts, :message),
    type: Keyword.get(opts, :type),
    # NOTE: the original body had a trailing comma after this entry, which is
    # a syntax error in Elixir map/struct literals; removed.
    original: Keyword.get(opts, :original)
  }
end
end
5 changes: 4 additions & 1 deletion lib/message.ex
Original file line number Diff line number Diff line change
@@ -241,7 +241,10 @@ defmodule LangChain.Message do
# convert the error to text and return error tuple
{:error, Utils.changeset_error_to_string(changeset)}

{:error, reason} ->
{:error, %LangChainError{message: message}} ->
{:error, message}

{:error, reason} when is_binary(reason) ->
{:error, reason}
end
end)
29 changes: 19 additions & 10 deletions lib/utils/chain_result.ex
Original file line number Diff line number Diff line change
@@ -21,10 +21,10 @@ defmodule LangChain.Utils.ChainResult do
@spec to_string(
LLMChain.t()
| {:ok, LLMChain.t()}
| {:error, LLMChain.t(), String.t()}
| {:error, LLMChain.t(), LangChainError.t()}
) ::
{:ok, String.t()} | {:error, LLMChain.t(), String.t()}
def to_string({:error, chain, reason}) when is_binary(reason) do
{:ok, String.t()} | {:error, LLMChain.t(), LangChainError.t()}
def to_string({:error, chain, %LangChainError{} = reason}) do
# if an error was passed in, forward it through.
{:error, chain, reason}
end
@@ -34,7 +34,15 @@ defmodule LangChain.Utils.ChainResult do
end

# when received a single ContentPart
def to_string(%LLMChain{last_message: %Message{role: :assistant, status: :complete, content: [%ContentPart{type: :text} = part]}} = _chain) do
def to_string(
%LLMChain{
last_message: %Message{
role: :assistant,
status: :complete,
content: [%ContentPart{type: :text} = part]
}
} = _chain
) do
{:ok, part.content}
end

@@ -43,15 +51,16 @@ defmodule LangChain.Utils.ChainResult do
end

def to_string(%LLMChain{last_message: %Message{role: :assistant, status: _incomplete}} = chain) do
{:error, chain, "Message is incomplete"}
{:error, chain, LangChainError.exception(type: "to_string", message: "Message is incomplete")}
end

def to_string(%LLMChain{last_message: %Message{}} = chain) do
{:error, chain, "Message is not from assistant"}
{:error, chain,
LangChainError.exception(type: "to_string", message: "Message is not from assistant")}
end

def to_string(%LLMChain{last_message: nil} = chain) do
{:error, chain, "No last message"}
{:error, chain, LangChainError.exception(type: "to_string", message: "No last message")}
end

@doc """
@@ -64,7 +73,7 @@ defmodule LangChain.Utils.ChainResult do
def to_string!(%LLMChain{} = chain) do
case ChainResult.to_string(chain) do
{:ok, result} -> result
{:error, _chain, reason} -> raise LangChainError, reason
{:error, _chain, %LangChainError{} = exception} -> raise exception
end
end

@@ -92,8 +101,8 @@ defmodule LangChain.Utils.ChainResult do
{:ok, updated} ->
updated

{:error, _chain, reason} ->
raise LangChainError, reason
{:error, _chain, %LangChainError{} = exception} ->
raise exception
end
end
end
30 changes: 18 additions & 12 deletions test/chains/llm_chain_test.exs
Original file line number Diff line number Diff line change
@@ -948,28 +948,30 @@ defmodule LangChain.Chains.LLMChainTest do
@tag live_call: true, live_open_ai: true
test "NON-STREAMING handles receiving an error when no messages sent" do
# create and run the chain
{:error, _updated_chain, reason} =
{:error, _updated_chain, %LangChainError{} = reason} =
LLMChain.new!(%{
llm: ChatOpenAI.new!(%{seed: 0, stream: false}),
verbose: false
})
|> LLMChain.run()

assert reason ==
assert reason.type == nil
assert reason.message ==
"Invalid 'messages': empty array. Expected an array with minimum length 1, but got an empty array instead."
end

@tag live_call: true, live_open_ai: true
test "STREAMING handles receiving an error when no messages sent" do
# create and run the chain
{:error, _updated_chain, reason} =
{:error, _updated_chain, %LangChainError{} = reason} =
LLMChain.new!(%{
llm: ChatOpenAI.new!(%{seed: 0, stream: true}),
verbose: false
})
|> LLMChain.run()

assert reason ==
assert reason.type == nil
assert reason.message ==
"Invalid 'messages': empty array. Expected an array with minimum length 1, but got an empty array instead."
end

@@ -1061,13 +1063,14 @@ defmodule LangChain.Chains.LLMChainTest do

# errors when trying to send a PromptTemplate
# create and run the chain
{:error, _updated_chain, reason} =
{:error, _updated_chain, %LangChainError{} = reason} =
%{llm: ChatOpenAI.new!(%{seed: 0})}
|> LLMChain.new!()
|> LLMChain.add_messages(messages)
|> LLMChain.run()

assert reason =~ ~r/PromptTemplates must be/
assert reason.type == nil
assert reason.message =~ ~r/PromptTemplates must be/
end

test "mode: :while_needs_response - increments current_failure_count on parse failure", %{
@@ -1087,15 +1090,16 @@ defmodule LangChain.Chains.LLMChainTest do
Message.new_user!("Say what I want you to say.")
]

{:error, error_chain, reason} =
{:error, error_chain, %LangChainError{} = reason} =
chain
|> LLMChain.message_processors([JsonProcessor.new!()])
|> LLMChain.add_messages(messages)
# run repeatedly
|> LLMChain.run(mode: :while_needs_response)

assert error_chain.current_failure_count == 3
assert reason == "Exceeded max failure count"
assert reason.type == "exceeded_failure_count"
assert reason.message == "Exceeded max failure count"

[m1, m2, m3, m4, m5, m6, m7] = error_chain.messages

@@ -1155,7 +1159,7 @@ defmodule LangChain.Chains.LLMChainTest do
callbacks: [handler]
})

{:error, error_chain, reason} =
{:error, error_chain, %LangChainError{} = reason} =
chain
|> LLMChain.message_processors([JsonProcessor.new!()])
|> LLMChain.add_messages([
@@ -1165,7 +1169,8 @@ defmodule LangChain.Chains.LLMChainTest do
|> LLMChain.run(mode: :while_needs_response)

assert error_chain.current_failure_count == 2
assert reason == "Exceeded max failure count"
assert reason.type == "exceeded_failure_count"
assert reason.message == "Exceeded max failure count"

[m1, m2, m3, m4, m5] = error_chain.messages

@@ -1368,15 +1373,16 @@ defmodule LangChain.Chains.LLMChainTest do
{:ok, fake_messages}
end)

{:error, updated_chain, reason} =
{:error, updated_chain, %LangChainError{} = reason} =
%{llm: ChatOpenAI.new!(%{stream: false}), verbose: false}
|> LLMChain.new!()
|> LLMChain.add_tools([fail_func])
|> LLMChain.add_message(Message.new_system!())
|> LLMChain.add_message(Message.new_user!("Execute the fail_func tool."))
|> LLMChain.run(mode: :until_success)

assert reason == "Exceeded max failure count"
assert reason.type == "exceeded_failure_count"
assert reason.message == "Exceeded max failure count"
assert updated_chain.current_failure_count == 3
end
end
67 changes: 54 additions & 13 deletions test/chat_models/chat_anthropic_test.exs
Original file line number Diff line number Diff line change
@@ -14,6 +14,7 @@ defmodule LangChain.ChatModels.ChatAnthropicTest do
alias LangChain.Function
alias LangChain.FunctionParam
alias LangChain.BedrockHelpers
alias LangChain.LangChainError

@test_model "claude-3-opus-20240229"
@bedrock_test_model "anthropic.claude-3-5-sonnet-20240620-v1:0"
@@ -287,16 +288,24 @@ defmodule LangChain.ChatModels.ChatAnthropicTest do

test "handles error messages", %{model: model} do
error = "Invalid API key"
message = "Received error from API: #{error}"

assert {:error, "Received error from API: #{error}"} ==
assert {:error, exception} =
ChatAnthropic.do_process_response(model, %{"message" => error})

assert exception.type == nil
assert exception.message == message
end

test "handles stream error messages", %{model: model} do
error = "Internal error"
message = "Stream exception received: #{inspect(error)}"

assert {:error, "Stream exception received: #{inspect(error)}"} ==
assert {:error, exception} =
ChatAnthropic.do_process_response(model, %{bedrock_exception: error})

assert exception.type == nil
assert exception.message == message
end
end

@@ -495,19 +504,35 @@ defmodule LangChain.ChatModels.ChatAnthropicTest do
assert call.name == "get_weather"
assert call.arguments == %{"location" => "San Francisco, CA", "unit" => "celsius"}
end

# The API signals overload with a top-level "error" payload. Verify that
# do_process_response/2 surfaces it as an error tuple whose exception carries
# the service-provided error type and message.
test "handles receiving overloaded error", %{model: model} do
  error_body = %{
    "details" => nil,
    "type" => "overloaded_error",
    "message" => "Overloaded"
  }

  result =
    ChatAnthropic.do_process_response(model, %{"type" => "error", "error" => error_body})

  assert {:error, %{type: "overloaded_error", message: "Overloaded"}} = result
end
end

describe "call/2" do
@tag live_call: true, live_anthropic: true
test "handles when invalid API key given" do
{:ok, chat} = ChatAnthropic.new(%{stream: true, api_key: "invalid"})

{:error, reason} =
{:error, %LangChainError{} = exception} =
ChatAnthropic.call(chat, [
Message.new_user!("Return the response 'Colorful Threads'.")
])

assert reason == "Authentication failure with request"
assert exception.message == "Authentication failure with request"
end

@tag live_call: true, live_anthropic_bedrock: true
@@ -521,12 +546,12 @@ defmodule LangChain.ChatModels.ChatAnthropicTest do
}
})

{:error, reason} =
{:error, %LangChainError{} = exception} =
ChatAnthropic.call(chat, [
Message.new_user!("Return the response 'Colorful Threads'.")
])

assert reason ==
assert exception.message ==
"Received error from API: The security token included in the request is invalid."
end

@@ -567,13 +592,7 @@ defmodule LangChain.ChatModels.ChatAnthropicTest do
role: :assistant
},
%LangChain.MessageDelta{
content: " up the good work",
status: :incomplete,
index: nil,
role: :assistant
},
%LangChain.MessageDelta{
content: "!",
content: " up the good work!",
status: :incomplete,
index: nil,
role: :assistant
@@ -860,6 +879,28 @@ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text

assert buffer == ""
end

# An SSE "error" event (such as Anthropic's overload signal) arriving mid-stream
# should be decoded into its JSON payload map rather than dropped or buffered.
# NOTE: the heredoc below relies on `\n` and `\"` escapes to build a single SSE
# frame ("event: error" line followed by its "data:" line) — do not reformat it.
test "handles error overloaded message" do
chunk = """
event: error\ndata: {\"type\":\"error\",\"error\":{\"details\":null,\"type\":\"overloaded_error\",\"message\":\"Overloaded\"}}
"""

{parsed, buffer} = ChatAnthropic.decode_stream(%ChatAnthropic{}, {chunk, ""})

# The error event is parsed into a map mirroring the service's JSON structure.
assert [
%{
"type" => "error",
"error" => %{
"details" => nil,
"type" => "overloaded_error",
"message" => "Overloaded"
}
}
] = parsed

# The whole frame was consumed, so no partial data remains in the buffer.
assert buffer == ""
end
end

describe "for_api/1" do
19 changes: 12 additions & 7 deletions test/chat_models/chat_google_ai_test.exs
Original file line number Diff line number Diff line change
@@ -402,8 +402,9 @@ defmodule ChatModels.ChatGoogleAITest do
]
}

assert [{:error, error_string}] = ChatGoogleAI.do_process_response(model, response)
assert error_string == "role: is invalid"
assert [{:error, %LangChainError{} = error}] = ChatGoogleAI.do_process_response(model, response)
assert error.type == "changeset"
assert error.message == "role: is invalid"
end

test "handles receiving function calls", %{model: model} do
@@ -482,20 +483,24 @@ defmodule ChatModels.ChatGoogleAITest do
}
}

assert {:error, error_string} = ChatGoogleAI.do_process_response(model, response)
assert error_string == "Invalid request"
assert {:error, %LangChainError{} = error} = ChatGoogleAI.do_process_response(model, response)
assert error.type == nil
assert error.message == "Invalid request"
end

test "handles Jason.DecodeError", %{model: model} do
response = {:error, %Jason.DecodeError{}}

assert {:error, error_string} = ChatGoogleAI.do_process_response(model, response)
assert "Received invalid JSON:" <> _ = error_string
assert {:error, %LangChainError{} = error} = ChatGoogleAI.do_process_response(model, response)
assert error.type == "invalid_json"
assert "Received invalid JSON:" <> _ = error.message
end

test "handles unexpected response with error", %{model: model} do
response = %{}
assert {:error, "Unexpected response"} = ChatGoogleAI.do_process_response(model, response)
assert {:error, %LangChainError{} = error} = ChatGoogleAI.do_process_response(model, response)
assert error.type == "unexpected_response"
assert error.message == "Unexpected response"
end
end

25 changes: 19 additions & 6 deletions test/chat_models/chat_mistral_ai_test.exs
Original file line number Diff line number Diff line change
@@ -4,6 +4,7 @@ defmodule LangChain.ChatModels.ChatMistralAITest do
alias LangChain.ChatModels.ChatMistralAI
alias LangChain.Message
alias LangChain.MessageDelta
alias LangChain.LangChainError

setup do
model = ChatMistralAI.new!(%{"model" => "mistral-tiny"})
@@ -125,7 +126,10 @@ defmodule LangChain.ChatModels.ChatMistralAITest do
]
}

assert [{:error, "role: is invalid"}] = ChatMistralAI.do_process_response(model, response)
assert [{:error, %LangChainError{} = error}] =
ChatMistralAI.do_process_response(model, response)

assert error.message == "role: is invalid"
end

test "handles receiving MessageDeltas as well", %{model: model} do
@@ -159,20 +163,29 @@ defmodule LangChain.ChatModels.ChatMistralAITest do
}
}

assert {:error, error_string} = ChatMistralAI.do_process_response(model, response)
assert error_string == "Invalid request"
assert {:error, %LangChainError{} = error} = ChatMistralAI.do_process_response(model, response)

assert error.type == nil
assert error.message == "Invalid request"
end

test "handles Jason.DecodeError", %{model: model} do
response = {:error, %Jason.DecodeError{}}

assert {:error, error_string} = ChatMistralAI.do_process_response(model, response)
assert "Received invalid JSON:" <> _ = error_string
assert {:error, %LangChainError{} = error} = ChatMistralAI.do_process_response(model, response)

assert error.type == "invalid_json"
assert "Received invalid JSON:" <> _ = error.message
end

test "handles unexpected response with error", %{model: model} do
response = %{}
assert {:error, "Unexpected response"} = ChatMistralAI.do_process_response(model, response)

assert {:error, %LangChainError{} = error} =
ChatMistralAI.do_process_response(model, response)

assert error.type == "unexpected_response"
assert error.message == "Unexpected response"
end
end

96 changes: 59 additions & 37 deletions test/chat_models/chat_open_ai_test.exs
Original file line number Diff line number Diff line change
@@ -7,6 +7,7 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
alias LangChain.Function
alias LangChain.FunctionParam
alias LangChain.TokenUsage
alias LangChain.LangChainError
alias LangChain.Message
alias LangChain.Message.ContentPart
alias LangChain.Message.ToolCall
@@ -83,11 +84,12 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
}
}

{:ok, openai} = ChatOpenAI.new(%{
"model" => @test_model,
"json_response" => true,
"json_schema" => json_schema
})
{:ok, openai} =
ChatOpenAI.new(%{
"model" => @test_model,
"json_response" => true,
"json_schema" => json_schema
})

assert openai.json_response == true
assert openai.json_schema == json_schema
@@ -149,10 +151,11 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
assert data.model == @test_model
assert data.temperature == 1
assert data.frequency_penalty == 0.5

assert data.response_format == %{
"type" => "json_schema",
"json_schema" => json_schema
}
"type" => "json_schema",
"json_schema" => json_schema
}
end

test "generates a map for an API call with max_tokens set" do
@@ -490,7 +493,7 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
"description" => nil,
"enum" => ["yellow", "red", "green"],
"type" => "string"
}
}
},
"required" => ["p1"]
}
@@ -910,8 +913,9 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
{:ok, chat} =
ChatOpenAI.new(%{model: "gpt-4-0613", seed: 0, stream: false, temperature: 1})

{:error, reason} = ChatOpenAI.call(chat, [too_large_user_request()])
assert reason =~ "maximum context length"
{:error, %LangChainError{} = reason} = ChatOpenAI.call(chat, [too_large_user_request()])
assert reason.type == nil
assert reason.message =~ "maximum context length"
end
end

@@ -1066,8 +1070,11 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
}
}

assert {:error, reason} = ChatOpenAI.do_process_response(model, response)
assert reason == "tool_calls: arguments: invalid json"
assert {:error, %LangChainError{} = reason} =
ChatOpenAI.do_process_response(model, response)

assert reason.type == "changeset"
assert reason.message == "tool_calls: arguments: invalid json"
end

test "handles a single tool_call from list", %{model: model} do
@@ -1098,9 +1105,10 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
"type" => "function"
}

assert {:error, message} = ChatOpenAI.do_process_response(model, call)
assert {:error, %LangChainError{} = error} = ChatOpenAI.do_process_response(model, call)

assert message == "arguments: invalid json"
assert error.type == "changeset"
assert error.message == "arguments: invalid json"
end

test "handles streamed deltas for multiple tool calls", %{model: model} do
@@ -1210,13 +1218,19 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
end

test "handles json parse error from server", %{model: model} do
{:error, "Received invalid JSON: " <> _} =
{:error, %LangChainError{} = error} =
ChatOpenAI.do_process_response(model, Jason.decode("invalid json"))

assert error.type == "invalid_json"
assert "Received invalid JSON: " <> _ = error.message
end

test "handles unexpected response", %{model: model} do
{:error, "Unexpected response"} =
{:error, %LangChainError{} = error} =
ChatOpenAI.do_process_response(model, "unexpected")

assert error.type == nil
assert error.message == "Unexpected response"
end

test "return multiple responses when given multiple choices", %{model: model} do
@@ -1274,9 +1288,10 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
test "STREAMING handles receiving an error when no messages sent" do
chat = ChatOpenAI.new!(%{seed: 0, stream: true})

{:error, reason} = ChatOpenAI.call(chat, [], [])
{:error, %LangChainError{} = reason} = ChatOpenAI.call(chat, [], [])

assert reason ==
assert reason.type == nil
assert reason.message ==
"Invalid 'messages': empty array. Expected an array with minimum length 1, but got an empty array instead."
end

@@ -1291,10 +1306,11 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
{:ok, chat} =
ChatOpenAI.new(%{seed: 0, stream: true, receive_timeout: 50, callbacks: [handler]})

{:error, reason} =
{:error, %LangChainError{} = reason} =
ChatOpenAI.call(chat, [Message.new_user!("Why is the sky blue?")], [])

assert reason == "Request timed out"
assert reason.type == "timeout"
assert reason.message == "Request timed out"
end
end

@@ -2018,20 +2034,24 @@ defmodule LangChain.ChatModels.ChatOpenAITest do

describe "set_response_format/1" do
test "generates a map for an API call with text format when json_response is false" do
{:ok, openai} = ChatOpenAI.new(%{
model: @test_model,
json_response: false
})
{:ok, openai} =
ChatOpenAI.new(%{
model: @test_model,
json_response: false
})

data = ChatOpenAI.for_api(openai, [], [])

assert data.response_format == %{"type" => "text"}
end

test "generates a map for an API call with json_object format when json_response is true and no schema" do
{:ok, openai} = ChatOpenAI.new(%{
model: @test_model,
json_response: true
})
{:ok, openai} =
ChatOpenAI.new(%{
model: @test_model,
json_response: true
})

data = ChatOpenAI.for_api(openai, [], [])

assert data.response_format == %{"type" => "json_object"}
@@ -2046,17 +2066,19 @@ defmodule LangChain.ChatModels.ChatOpenAITest do
}
}

{:ok, openai} = ChatOpenAI.new(%{
model: @test_model,
json_response: true,
json_schema: json_schema
})
{:ok, openai} =
ChatOpenAI.new(%{
model: @test_model,
json_response: true,
json_schema: json_schema
})

data = ChatOpenAI.for_api(openai, [], [])

assert data.response_format == %{
"type" => "json_schema",
"json_schema" => json_schema
}
"type" => "json_schema",
"json_schema" => json_schema
}
end
end
end
16 changes: 11 additions & 5 deletions test/chat_models/chat_vertex_ai_test.exs
Original file line number Diff line number Diff line change
@@ -10,6 +10,7 @@ defmodule ChatModels.ChatVertexAITest do
alias LangChain.Message.ToolResult
alias LangChain.MessageDelta
alias LangChain.Function
alias LangChain.LangChainError

setup do
{:ok, hello_world} =
@@ -189,8 +190,9 @@ defmodule ChatModels.ChatVertexAITest do
]
}

assert [{:error, error_string}] = ChatVertexAI.do_process_response(response)
assert error_string == "role: is invalid"
assert [{:error, %LangChainError{} = error}] = ChatVertexAI.do_process_response(response)
assert error.type == "changeset"
assert error.message == "role: is invalid"
end

test "handles receiving function calls" do
@@ -254,13 +256,17 @@ defmodule ChatModels.ChatVertexAITest do
test "handles Jason.DecodeError" do
response = {:error, %Jason.DecodeError{}}

assert {:error, error_string} = ChatVertexAI.do_process_response(response)
assert "Received invalid JSON:" <> _ = error_string
assert {:error, %LangChainError{} = error} = ChatVertexAI.do_process_response(response)

assert error.type == "invalid_json"
assert "Received invalid JSON:" <> _ = error.message
end

test "handles unexpected response with error" do
response = %{}
assert {:error, "Unexpected response"} = ChatVertexAI.do_process_response(response)
assert {:error, %LangChainError{} = error} = ChatVertexAI.do_process_response(response)
assert error.type == "unexpected_response"
assert error.message == "Unexpected response"
end
end

18 changes: 18 additions & 0 deletions test/langchain_error_test.exs
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
defmodule LangChain.LangChainErrorTest do
  use ExUnit.Case
  doctest LangChain.LangChainError

  alias LangChain.LangChainError

  describe "exception/1" do
    # Building the exception from a keyword list should copy every supported
    # key (:type, :message, :original) onto the struct unchanged.
    test "supports creating with keyword list" do
      wrapped = RuntimeError.exception("testing")

      result =
        LangChainError.exception(
          type: "test",
          message: "A test error",
          original: wrapped
        )

      # Pin `wrapped` so the pattern asserts identity of the original exception.
      assert %LangChainError{type: "test", message: "A test error", original: ^wrapped} = result
    end
  end
end
28 changes: 22 additions & 6 deletions test/utils/chain_result_test.exs
Original file line number Diff line number Diff line change
@@ -11,8 +11,10 @@ defmodule LangChain.Utils.ChainResultTest do

describe "to_string/1" do
test "passes an error tuple through" do
assert {:error, %LLMChain{}, "original error"} ==
ChainResult.to_string({:error, %LLMChain{}, "original error"})
an_error = LangChainError.exception("original error")

assert {:error, %LLMChain{}, an_error} ==
ChainResult.to_string({:error, %LLMChain{}, an_error})
end

test "returns {:ok, answer} when valid" do
@@ -30,23 +32,32 @@ defmodule LangChain.Utils.ChainResultTest do

test "returns error when no last message" do
chain = %LLMChain{last_message: nil}
assert {:error, chain, "No last message"} == ChainResult.to_string(chain)
assert {:error, _chain, %LangChainError{} = error} = ChainResult.to_string(chain)

assert error.type == "to_string"
assert error.message == "No last message"
end

test "returns error when incomplete last message" do
chain = %LLMChain{
last_message: Message.new!(%{role: :assistant, content: "Incomplete", status: :length})
}

assert {:error, chain, "Message is incomplete"} == ChainResult.to_string(chain)
assert {:error, _chain, %LangChainError{} = error} = ChainResult.to_string(chain)

assert error.type == "to_string"
assert error.message == "Message is incomplete"
end

test "returns error when last message is not from assistant" do
chain = %LLMChain{
last_message: Message.new_user!("The question")
}

assert {:error, chain, "Message is not from assistant"} == ChainResult.to_string(chain)
assert {:error, _chain, %LangChainError{} = error} = ChainResult.to_string(chain)

assert error.type == "to_string"
assert error.message == "Message is not from assistant"
end

test "handles an LLMChain.run/2 success result" do
@@ -82,7 +93,12 @@ defmodule LangChain.Utils.ChainResultTest do
test "returns error tuple with reason when invalid" do
data = %{thing: "one"}
chain = %LLMChain{last_message: nil}
assert {:error, _chain, "No last message"} = ChainResult.to_map(chain, data, :answer)

assert {:error, _chain, %LangChainError{} = error} =
ChainResult.to_map(chain, data, :answer)

assert error.type == "to_string"
assert error.message == "No last message"
end
end

0 comments on commit d5d9068

Please sign in to comment.