diff --git a/.vscode/settings.json b/.vscode/settings.json index b24a2719..9dd1a864 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,6 +1,6 @@ { "cSpell.words": [ - "Langchain", + "LangChain", "openai" ] } \ No newline at end of file diff --git a/README.md b/README.md index 53a0615b..a7a445f4 100644 --- a/README.md +++ b/README.md @@ -72,19 +72,19 @@ config :langchain, openai_key: fn -> System.get_env("OPENAI_KEY") end ## Usage -The central module in this library is `Langchain.Chains.LLMChain`. Most other pieces are either inputs to this, or structures used by it. For understanding how to use the library, start there. +The central module in this library is `LangChain.Chains.LLMChain`. Most other pieces are either inputs to this, or structures used by it. For understanding how to use the library, start there. ### Exposing a custom Elixir function to ChatGPT -For an interactive example, refer to the project [Livebook notebook "Langchain: Executing Custom Elixir Functions"](notebooks/custom_functions.livemd). +For an interactive example, refer to the project [Livebook notebook "LangChain: Executing Custom Elixir Functions"](notebooks/custom_functions.livemd). The following is an example of a function that receives parameter arguments. ```elixir -alias Langchain.Function -alias Langchain.Message -alias Langchain.Chains.LLMChain -alias Langchain.ChatModels.ChatOpenAI +alias LangChain.Function +alias LangChain.Message +alias LangChain.Chains.LLMChain +alias LangChain.ChatModels.ChatOpenAI # map of data we want to be passed as `context` to the function when # executed. @@ -150,6 +150,6 @@ mix test Executing a specific test, wether it is a `live_call` or not, will execute it creating a potentially billable event. -When doing local development on the `Langchain` library itself, rename the `.envrc_template` to `.envrc` and populate it with your private API values. This is only used when running live test when explicitly requested. 
+When doing local development on the `LangChain` library itself, rename the `.envrc_template` to `.envrc` and populate it with your private API values. This is only used when running live test when explicitly requested. Use a tool like [Dotenv](https://github.com/motdotla/dotenv) to load the API values into the ENV when using the library locally. \ No newline at end of file diff --git a/lib/chains/data_extraction_chain.ex b/lib/chains/data_extraction_chain.ex index 2a877352..13e4355c 100644 --- a/lib/chains/data_extraction_chain.ex +++ b/lib/chains/data_extraction_chain.ex @@ -1,4 +1,4 @@ -defmodule Langchain.Chains.DataExtractionChain do +defmodule LangChain.Chains.DataExtractionChain do @moduledoc """ Defines an LLMChain for performing data extraction from a body of text. @@ -32,7 +32,7 @@ defmodule Langchain.Chains.DataExtractionChain do "Alex is 5 feet tall. Claudia is 4 feet taller than Alex and jumps higher than him. Claudia is a brunette and Alex is blonde. Alex's dog Frosty is a labrador and likes to play hide and seek." - {:ok, result} = Langchain.Chains.DataExtractionChain.run(chat, schema_parameters, data_prompt) + {:ok, result} = LangChain.Chains.DataExtractionChain.run(chat, schema_parameters, data_prompt) # Example result [ @@ -54,9 +54,9 @@ defmodule Langchain.Chains.DataExtractionChain do """ use Ecto.Schema require Logger - alias Langchain.PromptTemplate - alias Langchain.Message - alias Langchain.Chains.LLMChain + alias LangChain.PromptTemplate + alias LangChain.Message + alias LangChain.Chains.LLMChain @extraction_template ~s"Extract and save the relevant entities mentioned in the following passage together with their properties. @@ -109,9 +109,9 @@ defmodule Langchain.Chains.DataExtractionChain do Build the function to expose to the LLM that can be called for data extraction. 
""" - @spec build_extract_function(json_schema :: map()) :: Langchain.Function.t() | no_return() + @spec build_extract_function(json_schema :: map()) :: LangChain.Function.t() | no_return() def build_extract_function(json_schema) do - Langchain.Function.new!(%{ + LangChain.Function.new!(%{ name: "information_extraction", description: "Extracts the relevant information from the passage.", parameters_schema: %{ diff --git a/lib/chains/llm_chain.ex b/lib/chains/llm_chain.ex index 3da15e1e..918fb412 100644 --- a/lib/chains/llm_chain.ex +++ b/lib/chains/llm_chain.ex @@ -1,4 +1,4 @@ -defmodule Langchain.Chains.LLMChain do +defmodule LangChain.Chains.LLMChain do @doc """ Define an LLMChain. This is the heart of the LangChain library. @@ -14,12 +14,12 @@ defmodule Langchain.Chains.LLMChain do use Ecto.Schema import Ecto.Changeset require Logger - alias Langchain.PromptTemplate + alias LangChain.PromptTemplate alias __MODULE__ - alias Langchain.Message - alias Langchain.MessageDelta - alias Langchain.Function - alias Langchain.LangchainError + alias LangChain.Message + alias LangChain.MessageDelta + alias LangChain.Function + alias LangChain.LangChainError @primary_key false embedded_schema do @@ -81,7 +81,7 @@ defmodule Langchain.Chains.LLMChain do chain {:error, changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end diff --git a/lib/chat_models/chat_open_ai.ex b/lib/chat_models/chat_open_ai.ex index 11842063..f9f9e661 100644 --- a/lib/chat_models/chat_open_ai.ex +++ b/lib/chat_models/chat_open_ai.ex @@ -1,10 +1,10 @@ -defmodule Langchain.ChatModels.ChatOpenAI do +defmodule LangChain.ChatModels.ChatOpenAI do @moduledoc """ Represents the [OpenAI ChatModel](https://platform.openai.com/docs/api-reference/chat/create). Parses and validates inputs for making a requests from the OpenAI Chat API. - Converts responses into more specialized `Langchain` data structures. + Converts responses into more specialized `LangChain` data structures. 
- https://github.com/openai/openai-cookbook/blob/main/examples/How_to_call_functions_with_chat_models.ipynb @@ -12,14 +12,14 @@ defmodule Langchain.ChatModels.ChatOpenAI do use Ecto.Schema require Logger import Ecto.Changeset - import Langchain.Utils.ApiOverride + import LangChain.Utils.ApiOverride alias __MODULE__ - alias Langchain.Config - alias Langchain.Message - alias Langchain.LangchainError - alias Langchain.ForOpenAIApi - alias Langchain.Utils - alias Langchain.MessageDelta + alias LangChain.Config + alias LangChain.Message + alias LangChain.LangChainError + alias LangChain.ForOpenAIApi + alias LangChain.Utils + alias LangChain.MessageDelta # NOTE: As of gpt-4 and gpt-3.5, only one function_call is issued at a time # even when multiple requests could be issued based on the prompt. @@ -87,7 +87,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do chain {:error, changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -133,19 +133,19 @@ defmodule Langchain.ChatModels.ChatOpenAI do received from the API. **NOTE:** This function *can* be used directly, but the primary interface - should be through `Langchain.Chains.LLMChain`. The `ChatOpenAI` module is more focused on - translating the `Langchain` data structures to and from the OpenAI API. + should be through `LangChain.Chains.LLMChain`. The `ChatOpenAI` module is more focused on + translating the `LangChain` data structures to and from the OpenAI API. - Another benefit of using `Langchain.Chains.LLMChain` is that it combines the + Another benefit of using `LangChain.Chains.LLMChain` is that it combines the storage of messages, adding functions, adding custom context that should be - passed to functions, and automatically applying `Langchain.MessageDelta` + passed to functions, and automatically applying `LangChain.MessageDelta` structs as they are are received, then converting those to the full - `Langchain.Message` once fully complete. 
+ `LangChain.Message` once fully complete. """ @spec call( t(), String.t() | [Message.t()], - [Langchain.Function.t()], + [LangChain.Function.t()], nil | (Message.t() | MessageDelta.t() -> any()) ) :: call_response() def call(openai, prompt, functions \\ [], callback_fn \\ nil) @@ -170,7 +170,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do response _other -> - raise LangchainError, + raise LangChainError, "An unexpected fake API response was set. Should be an `{:ok, value}`" end else @@ -184,7 +184,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do {:ok, parsed_data} end rescue - err in LangchainError -> + err in LangChainError -> {:error, err.message} end end @@ -258,7 +258,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do # # body: [ # [ - # %Langchain.MessageDelta{ + # %LangChain.MessageDelta{ # content: nil, # index: 0, # function_name: nil, @@ -279,7 +279,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do {request, response} {:error, %Mint.TransportError{reason: :timeout}} -> - {request, LangchainError.exception("Request timed out")} + {request, LangChainError.exception("Request timed out")} {:error, exception} -> Logger.error("Failed request to API: #{inspect(exception)}") @@ -308,7 +308,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do {:ok, %Req.Response{body: data}} -> data - {:error, %LangchainError{message: reason}} -> + {:error, %LangChainError{message: reason}} -> {:error, reason} other -> @@ -361,7 +361,7 @@ defmodule Langchain.ChatModels.ChatOpenAI do # return the error |> case do [{:error, reason}] -> - raise LangchainError, reason + raise LangChainError, reason other -> other diff --git a/lib/config.ex b/lib/config.ex index fd843dfb..17c3b606 100644 --- a/lib/config.ex +++ b/lib/config.ex @@ -1,4 +1,4 @@ -defmodule Langchain.Config do +defmodule LangChain.Config do @moduledoc """ Utility that handles interaction with the application's configuration. 
""" diff --git a/lib/for_open_ai_api.ex b/lib/for_open_ai_api.ex index b429a9c2..3c22c232 100644 --- a/lib/for_open_ai_api.ex +++ b/lib/for_open_ai_api.ex @@ -1,6 +1,6 @@ -defprotocol Langchain.ForOpenAIApi do +defprotocol LangChain.ForOpenAIApi do @moduledoc """ - A protocol that defines a way for converting the Langchain Elixir data structs + A protocol that defines a way for converting the LangChain Elixir data structs to an OpenAI supported data structure and format for making an API call. """ @@ -12,6 +12,6 @@ defprotocol Langchain.ForOpenAIApi do def for_api(struct) end -defimpl Langchain.ForOpenAIApi, for: Any do +defimpl LangChain.ForOpenAIApi, for: Any do def for_api(_struct), do: nil end diff --git a/lib/function.ex b/lib/function.ex index 68e9b4ee..a552cc97 100644 --- a/lib/function.ex +++ b/lib/function.ex @@ -1,8 +1,8 @@ -# {:ok, f} = Langchain.Function.new(%{name: "register_person", description: "Register a new person in the system", required: ["name"], parameters: [p_name, p_age]}) +# {:ok, f} = LangChain.Function.new(%{name: "register_person", description: "Register a new person in the system", required: ["name"], parameters: [p_name, p_age]}) # NOTE: New in OpenAI - https://openai.com/blog/function-calling-and-other-api-updates # - 13 June 2023 -# NOTE: Pretty much takes the place of a Langchain "Tool". -defmodule Langchain.Function do +# NOTE: Pretty much takes the place of a LangChain "Tool". +defmodule LangChain.Function do @moduledoc """ Defines a "function" that can be provided to an LLM for the LLM to optionally execute and pass argument data to. @@ -25,7 +25,7 @@ defmodule Langchain.Function do This example defines a function that an LLM can execute for performing basic math calculations. **NOTE:** This is a partial implementation of the - `Langchain.Tools.Calculator`. + `LangChain.Tools.Calculator`. Function.new(%{ name: "calculator", @@ -50,7 +50,7 @@ defmodule Langchain.Function do map. 
The `context` argument is passed through as the `context` on a - `Langchain.Chains.LLMChain`. This is whatever context data is needed for the + `LangChain.Chains.LLMChain`. This is whatever context data is needed for the function to do it's work. Context examples may be user_id, account_id, account struct, billing level, @@ -66,7 +66,7 @@ defmodule Langchain.Function do import Ecto.Changeset require Logger alias __MODULE__ - alias Langchain.LangchainError + alias LangChain.LangChainError @primary_key false embedded_schema do @@ -106,7 +106,7 @@ defmodule Langchain.Function do function {:error, changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -118,7 +118,7 @@ defmodule Langchain.Function do @doc """ Execute the function passing in arguments and additional optional context. - This is called by a `Langchain.Chains.LLMChain` when a `Function` execution is + This is called by a `LangChain.Chains.LLMChain` when a `Function` execution is requested by the LLM. """ def execute(%Function{function: fun} = function, arguments, context) do @@ -127,8 +127,8 @@ defmodule Langchain.Function do end end -defimpl Langchain.ForOpenAIApi, for: Langchain.Function do - alias Langchain.Function +defimpl LangChain.ForOpenAIApi, for: LangChain.Function do + alias LangChain.Function def for_api(%Function{} = fun) do %{ diff --git a/lib/gettext.ex b/lib/gettext.ex index 10466d37..4d471d78 100644 --- a/lib/gettext.ex +++ b/lib/gettext.ex @@ -1,11 +1,11 @@ -defmodule Langchain.Gettext do +defmodule LangChain.Gettext do @moduledoc """ A module providing Internationalization with a gettext-based API. 
By using [Gettext](https://hexdocs.pm/gettext), your module gains a set of macros for translations, for example: - import Langchain.Gettext + import LangChain.Gettext # Simple translation gettext("Here is the string to translate") diff --git a/lib/langchain_error.ex b/lib/langchain_error.ex index 470bee50..a3385d72 100644 --- a/lib/langchain_error.ex +++ b/lib/langchain_error.ex @@ -1,19 +1,19 @@ -defmodule Langchain.LangchainError do +defmodule LangChain.LangChainError do @moduledoc """ - Exception used for raising Langchain specific errors. + Exception used for raising LangChain specific errors. It stores the `:message`. Passing an Ecto.Changeset with an error converts the error into a string message. - raise LangchainError, changeset + raise LangChainError, changeset - raise LangchainError, "Message text" + raise LangChainError, "Message text" """ - import Langchain.Utils, only: [changeset_error_to_string: 1] + import LangChain.Utils, only: [changeset_error_to_string: 1] alias __MODULE__ - @type t :: %LangchainError{} + @type t :: %LangChainError{} defexception [:message] @@ -23,11 +23,11 @@ defmodule Langchain.LangchainError do """ @spec exception(message :: String.t() | Ecto.Changeset.t()) :: t() def exception(message) when is_binary(message) do - %LangchainError{message: message} + %LangChainError{message: message} end def exception(%Ecto.Changeset{} = changeset) do text_reason = changeset_error_to_string(changeset) - %LangchainError{message: text_reason} + %LangChainError{message: text_reason} end end diff --git a/lib/message.ex b/lib/message.ex index 9ec5f52d..ce662539 100644 --- a/lib/message.ex +++ b/lib/message.ex @@ -1,4 +1,4 @@ -defmodule Langchain.Message do +defmodule LangChain.Message do @moduledoc """ Models a complete `Message` for a chat LLM. 
@@ -34,7 +34,7 @@ defmodule Langchain.Message do import Ecto.Changeset require Logger alias __MODULE__ - alias Langchain.LangchainError + alias LangChain.LangChainError @primary_key false embedded_schema do @@ -78,7 +78,7 @@ defmodule Langchain.Message do message {:error, changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -191,7 +191,7 @@ defmodule Langchain.Message do msg {:error, %Ecto.Changeset{} = changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -215,7 +215,7 @@ defmodule Langchain.Message do msg {:error, %Ecto.Changeset{} = changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -238,7 +238,7 @@ defmodule Langchain.Message do msg {:error, %Ecto.Changeset{} = changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -272,7 +272,7 @@ defmodule Langchain.Message do msg {:error, %Ecto.Changeset{} = changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -297,7 +297,7 @@ defmodule Langchain.Message do msg {:error, %Ecto.Changeset{} = changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -311,8 +311,8 @@ defmodule Langchain.Message do def is_function_call?(%Message{}), do: false end -defimpl Langchain.ForOpenAIApi, for: Langchain.Message do - alias Langchain.Message +defimpl LangChain.ForOpenAIApi, for: LangChain.Message do + alias LangChain.Message def for_api(%Message{role: :assistant, function_name: fun_name} = fun) when is_binary(fun_name) do %{ diff --git a/lib/message_delta.ex b/lib/message_delta.ex index a3900c84..a3c17cea 100644 --- a/lib/message_delta.ex +++ b/lib/message_delta.ex @@ -1,11 +1,11 @@ -defmodule Langchain.MessageDelta do +defmodule LangChain.MessageDelta do @moduledoc """ Models a "delta" message from a chat LLM. A delta is a small chunk, or piece of a much larger complete message. 
A series of deltas can are used to construct a complete message. Delta messages must be applied in order for them to be valid. Delta messages - can be combined and transformed into a `Langchain.Message` once the final + can be combined and transformed into a `LangChain.Message` once the final piece is received. ## Roles @@ -28,9 +28,9 @@ defmodule Langchain.MessageDelta do import Ecto.Changeset require Logger alias __MODULE__ - alias Langchain.LangchainError - alias Langchain.Message - alias Langchain.Utils + alias LangChain.LangChainError + alias LangChain.Message + alias LangChain.Utils @primary_key false embedded_schema do @@ -76,7 +76,7 @@ defmodule Langchain.MessageDelta do message {:error, changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -85,7 +85,7 @@ defmodule Langchain.MessageDelta do one that smaller deltas are merged into. iex> delta_1 = - ...> %Langchain.MessageDelta{ + ...> %LangChain.MessageDelta{ ...> content: nil, ...> index: 0, ...> function_name: nil, @@ -94,7 +94,7 @@ defmodule Langchain.MessageDelta do ...> status: :incomplete ...> } iex> delta_2 = - ...> %Langchain.MessageDelta{ + ...> %LangChain.MessageDelta{ ...> content: "Hello", ...> index: 0, ...> function_name: nil, @@ -102,8 +102,8 @@ defmodule Langchain.MessageDelta do ...> arguments: nil, ...> status: :incomplete ...> } - iex> Langchain.MessageDelta.merge_delta(delta_1, delta_2) - %Langchain.MessageDelta{content: "Hello", status: :incomplete, index: 0, function_name: nil, role: :assistant, arguments: nil} + iex> LangChain.MessageDelta.merge_delta(delta_1, delta_2) + %LangChain.MessageDelta{content: "Hello", status: :incomplete, index: 0, function_name: nil, role: :assistant, arguments: nil} A set of deltas can be easily merged like this: @@ -210,7 +210,7 @@ defmodule Langchain.MessageDelta do This is assumed to be the result of merging all the received `MessageDelta`s. An error is returned if the `status` is `:incomplete`. 
- If the `MessageDelta` fails to convert to a `Langchain.Message`, an error is + If the `MessageDelta` fails to convert to a `LangChain.Message`, an error is returned with the reason. """ @spec to_message(t()) :: {:ok, Message.t()} | {:error, String.t()} diff --git a/lib/prompt_template.ex b/lib/prompt_template.ex index 71339353..60a29cd3 100644 --- a/lib/prompt_template.ex +++ b/lib/prompt_template.ex @@ -1,4 +1,4 @@ -defmodule Langchain.PromptTemplate do +defmodule LangChain.PromptTemplate do @moduledoc """ Enables defining a prompt, optionally as a template, but delaying the final building of it until a later time when input values are substituted in. @@ -10,7 +10,7 @@ defmodule Langchain.PromptTemplate do # Create a template and convert it to a message prompt = PromptTemplate.new!(%{text: "My template", role: :user}) - %Langchain.Message{} = message = PromptTemplate.to_message(prompt) + %LangChain.Message{} = message = PromptTemplate.to_message(prompt) PromptTemplates are powerful because they support Elixir's EEx templates allowing for parameter substitution. This is helpful when we want to prepare a @@ -31,8 +31,8 @@ defmodule Langchain.PromptTemplate do import Ecto.Changeset require Logger alias __MODULE__ - alias Langchain.LangchainError - alias Langchain.Message + alias LangChain.LangChainError + alias LangChain.Message @primary_key false embedded_schema do @@ -65,7 +65,7 @@ defmodule Langchain.PromptTemplate do prompt {:error, changeset} -> - raise LangchainError, changeset + raise LangChainError, changeset end end @@ -256,14 +256,14 @@ defmodule Langchain.PromptTemplate do {key, other}, _acc -> msg = "Unsupported `composed_of` entry for #{inspect(key)}: #{inspect(other)}" Logger.error(msg) - raise LangchainError, msg + raise LangChainError, msg end) PromptTemplate.format(full_prompt, composed_inputs) end @doc """ - Transform a PromptTemplate to a `Langchain.Message`. Provide the inputs at the time of + Transform a PromptTemplate to a `LangChain.Message`. 
Provide the inputs at the time of transformation to render the final content. """ @spec to_message(t(), input :: %{atom() => any()}) :: @@ -274,7 +274,7 @@ defmodule Langchain.PromptTemplate do end @doc """ - Transform a PromptTemplate to a `Langchain.Message`. Provide the inputs at the time of + Transform a PromptTemplate to a `LangChain.Message`. Provide the inputs at the time of transformation to render the final content. Raises an exception if invalid. """ @spec to_message!(t(), input :: %{atom() => any()}) :: Message.t() | no_return() @@ -284,7 +284,7 @@ defmodule Langchain.PromptTemplate do end @doc """ - Transform a list of PromptTemplates into a list of `Langchain.Message`s. + Transform a list of PromptTemplates into a list of `LangChain.Message`s. Applies the inputs to the list of prompt templates. If any of the prompt entries are invalid or fail, an exception is raised. """ diff --git a/lib/tools/calculator.ex b/lib/tools/calculator.ex index dbd3ddd5..f39df733 100644 --- a/lib/tools/calculator.ex +++ b/lib/tools/calculator.ex @@ -1,8 +1,8 @@ -defmodule Langchain.Tools.Calculator do +defmodule LangChain.Tools.Calculator do @moduledoc """ Defines a Calculator tool for performing basic math calculations. - This is an example of a pre-built `Langchain.Function` that is designed and + This is an example of a pre-built `LangChain.Function` that is designed and configured for a specific purpose. This defines a function to expose to an LLM and provides an implementation for @@ -13,7 +13,7 @@ defmodule Langchain.Tools.Calculator do * make repeated calls to run the chain as the tool is called and the results are then made available to the LLM before it returns the final result. 
* OR run the chain using the `while_needs_response: true` option like this: - `Langchain.LLMChain.run(chain, while_needs_response: true)` + `LangChain.Chains.LLMChain.run(chain, while_needs_response: true)` ## Example @@ -32,7 +32,7 @@ defmodule Langchain.Tools.Calculator do Verbose log output: - LLM: %Langchain.ChatModels.ChatOpenAI{ + LLM: %LangChain.ChatModels.ChatOpenAI{ endpoint: "https://api.openai.com/v1/chat/completions", model: "gpt-3.5-turbo", temperature: 0.0, @@ -42,7 +42,7 @@ defmodule Langchain.Tools.Calculator do stream: false } MESSAGES: [ - %Langchain.Message{ + %LangChain.Message{ content: "Answer the following math question: What is 100 + 300 - 200?", index: nil, status: :complete, @@ -52,10 +52,10 @@ defmodule Langchain.Tools.Calculator do } ] FUNCTIONS: [ - %Langchain.Function{ + %LangChain.Function{ name: "calculator", description: "Perform basic math calculations", - function: #Function<0.108164323/2 in Langchain.Tools.Calculator.execute>, + function: #Function<0.108164323/2 in LangChain.Tools.Calculator.execute>, parameters_schema: %{ properties: %{ expression: %{ @@ -68,7 +68,7 @@ defmodule Langchain.Tools.Calculator do } } ] - SINGLE MESSAGE RESPONSE: %Langchain.Message{ + SINGLE MESSAGE RESPONSE: %LangChain.Message{ content: nil, index: 0, status: :complete, @@ -78,7 +78,7 @@ defmodule Langchain.Tools.Calculator do } EXECUTING FUNCTION: "calculator" FUNCTION RESULT: "200" - SINGLE MESSAGE RESPONSE: %Langchain.Message{ + SINGLE MESSAGE RESPONSE: %LangChain.Message{ content: "The answer to the math question \"What is 100 + 300 - 200?\" is 200.", index: 0, status: :complete, @@ -89,7 +89,7 @@ defmodule Langchain.Tools.Calculator do """ require Logger - alias Langchain.Function + alias LangChain.Function @doc """ Define the "calculator" function. Returns a success/failure response. 
@@ -120,7 +120,7 @@ defmodule Langchain.Tools.Calculator do function {:error, changeset} -> - raise Langchain.LangchainError, changeset + raise LangChain.LangChainError, changeset end end diff --git a/lib/utils.ex b/lib/utils.ex index 1265b0a4..82c7c542 100644 --- a/lib/utils.ex +++ b/lib/utils.ex @@ -1,4 +1,4 @@ -defmodule Langchain.Utils do +defmodule LangChain.Utils do @moduledoc """ Collection of helpful utilities mostly for internal use. """ @@ -33,9 +33,9 @@ defmodule Langchain.Utils do # with our gettext backend as first argument. Translations are # available in the errors.po file (as we use the "errors" domain). if count = opts[:count] do - Gettext.dngettext(Langchain.Gettext, "errors", msg, msg, count, opts) + Gettext.dngettext(LangChain.Gettext, "errors", msg, msg, count, opts) else - Gettext.dgettext(Langchain.Gettext, "errors", msg, opts) + Gettext.dgettext(LangChain.Gettext, "errors", msg, opts) end end diff --git a/lib/utils/api_override.ex b/lib/utils/api_override.ex index fdf8f94d..6aba7d90 100644 --- a/lib/utils/api_override.ex +++ b/lib/utils/api_override.ex @@ -1,4 +1,4 @@ -defmodule Langchain.Utils.ApiOverride do +defmodule LangChain.Utils.ApiOverride do @moduledoc """ Tools for overriding API results. Used for testing. 
@@ -6,7 +6,7 @@ defmodule Langchain.Utils.ApiOverride do ## Test Example - import Langchain.Utils.ApiOverride + import LangChain.Utils.ApiOverride model = ChatOpenAI.new!(%{temperature: 1, stream: true}) diff --git a/mix.exs b/mix.exs index 1c84f45d..387202ea 100644 --- a/mix.exs +++ b/mix.exs @@ -1,4 +1,4 @@ -defmodule Langchain.MixProject do +defmodule LangChain.MixProject do use Mix.Project @version "0.1.0" diff --git a/notebooks/custom_functions.livemd b/notebooks/custom_functions.livemd index 4643913e..e6e1c5cc 100644 --- a/notebooks/custom_functions.livemd +++ b/notebooks/custom_functions.livemd @@ -52,7 +52,7 @@ With an Elixir function defined, we will wrap it in a LangChain `Function` struc This is what that looks like: ```elixir -alias Langchain.Function +alias LangChain.Function function = Function.new!(%{ @@ -69,7 +69,7 @@ function = ``` -%Langchain.Function{ +%LangChain.Function{ name: "get_user_info", description: "Return JSON object of the current users's relevant information.", function: #Function<41.3316493/2 in :erl_eval.expr/6>, @@ -111,7 +111,7 @@ Application.put_env(:langchain, :openai_key, System.fetch_env!("LB_OPENAI_KEY")) ## Defining our AI Assistant -We'll use the `Langchain.Message` struct to define the messages for what we want the LLM to do. Our `system` message instructs the LLM how to behave. +We'll use the `LangChain.Message` struct to define the messages for what we want the LLM to do. Our `system` message instructs the LLM how to behave. In this example, we want the assistant to generate Haiku poems about the current user's favorite animals. However, we only want it to work for users who are "members" and not "trial" users. @@ -120,7 +120,7 @@ The instructions we're giving the LLM will require it to execute the function to What we're demonstrating here is that the LLM can interact with our Elixir application, use multiple pieces of returned information to make business logic decisions and fullfil our system requests. 
```elixir -alias Langchain.Message +alias LangChain.Message messages = [ Message.new_system!(~s(You are a helpful haiku poem generating assistant. @@ -135,7 +135,7 @@ messages = [ ``` [ - %Langchain.Message{ + %LangChain.Message{ content: "You are a helpful haiku poem generating assistant. \n ONLY generate a haiku for users with an `account_type` of \"member\". \n If the user has an `account_type` of \"trial\", say you can't do it, \n but you would love to help them if they upgrade and become a member.", index: nil, status: :complete, @@ -143,7 +143,7 @@ messages = [ function_name: nil, arguments: nil }, - %Langchain.Message{ + %LangChain.Message{ content: "The current user is requesting a Haiku poem about their favorite animal.", index: nil, status: :complete, @@ -161,7 +161,7 @@ For this example, we're talking to OpenAI's ChatGPT service. Let's setup that mo For the kind of work we're asking it to do, GPT-4 does a better job than previous model versions. We'll specify we want "gpt-4". ```elixir -alias Langchain.ChatModels.ChatOpenAI +alias LangChain.ChatModels.ChatOpenAI chat_model = ChatOpenAI.new!(%{model: "gpt-4", temperature: 1, stream: false}) ``` @@ -169,7 +169,7 @@ chat_model = ChatOpenAI.new!(%{model: "gpt-4", temperature: 1, stream: false}) ``` -%Langchain.ChatModels.ChatOpenAI{ +%LangChain.ChatModels.ChatOpenAI{ endpoint: "https://api.openai.com/v1/chat/completions", model: "gpt-4", temperature: 1.0, @@ -182,7 +182,7 @@ chat_model = ChatOpenAI.new!(%{model: "gpt-4", temperature: 1, stream: false}) ## Defining our Application's User Context -Here we'll define some special context that we want passed through to our `Langchain.Function` when it is executed. +Here we'll define some special context that we want passed through to our `LangChain.Function` when it is executed. In a real application, this might be session based user or account information. It's whatever is relevant to our application that changes how and what a function should operate. 
@@ -209,7 +209,7 @@ Also, note the `verbose: true` setting. That causes a number of `IO.inspect` cal Additionally, the `stream: false` option says we want the result only when it's complete. This example isn't setup for receving a streaming response. We're keeping it simple! ```elixir -alias Langchain.Chains.LLMChain +alias LangChain.Chains.LLMChain {:ok, updated_chain, response} = %{llm: chat_model, custom_context: context, verbose: true} @@ -229,7 +229,7 @@ response.content ``` -LLM: %Langchain.ChatModels.ChatOpenAI{ +LLM: %LangChain.ChatModels.ChatOpenAI{ endpoint: "https://api.openai.com/v1/chat/completions", model: "gpt-4", temperature: 1.0, @@ -239,7 +239,7 @@ LLM: %Langchain.ChatModels.ChatOpenAI{ stream: false } MESSAGES: [ - %Langchain.Message{ + %LangChain.Message{ content: "You are a helpful haiku poem generating assistant. \n ONLY generate a haiku for users with an `account_type` of \"member\". \n If the user has an `account_type` of \"trial\", say you can't do it, \n but you would love to help them if they upgrade and become a member.", index: nil, status: :complete, @@ -247,7 +247,7 @@ MESSAGES: [ function_name: nil, arguments: nil }, - %Langchain.Message{ + %LangChain.Message{ content: "The current user is requesting a Haiku poem about their favorite animal.", index: nil, status: :complete, @@ -257,14 +257,14 @@ MESSAGES: [ } ] FUNCTIONS: [ - %Langchain.Function{ + %LangChain.Function{ name: "get_user_info", description: "Return JSON object of the current users's relevant information.", function: #Function<41.3316493/2 in :erl_eval.expr/6>, parameters_schema: nil } ] -SINGLE MESSAGE RESPONSE: %Langchain.Message{ +SINGLE MESSAGE RESPONSE: %LangChain.Message{ content: nil, index: 0, status: :complete, @@ -276,7 +276,7 @@ EXECUTING FUNCTION: "get_user_info" 10:31:04.338 [debug] Executing function "get_user_info" FUNCTION RESULT: "{\"account_type\":\"member\",\"favorite_animal\":\"Aardvark\",\"name\":\"Joan Jett\",\"user_id\":2}" -SINGLE MESSAGE 
RESPONSE: %Langchain.Message{ +SINGLE MESSAGE RESPONSE: %LangChain.Message{ content: "Delving in darkness,\nAardvark, nature's architect,\nJoan's spirit animal.", index: 0, status: :complete, diff --git a/test/chains/data_extraction_chain_test.exs b/test/chains/data_extraction_chain_test.exs index 72c04e24..51f1bcf6 100644 --- a/test/chains/data_extraction_chain_test.exs +++ b/test/chains/data_extraction_chain_test.exs @@ -1,8 +1,8 @@ -defmodule Langchain.Chains.DataExtractionChainTest do - use Langchain.BaseCase +defmodule LangChain.Chains.DataExtractionChainTest do + use LangChain.BaseCase - doctest Langchain.Chains.DataExtractionChain - alias Langchain.ChatModels.ChatOpenAI + doctest LangChain.Chains.DataExtractionChain + alias LangChain.ChatModels.ChatOpenAI # Extraction - https://js.langchain.com/docs/modules/chains/openai_functions/extraction @tag :live_call @@ -28,7 +28,7 @@ defmodule Langchain.Chains.DataExtractionChainTest do "Alex is 5 feet tall. Claudia is 4 feet taller than Alex and jumps higher than him. Claudia is a brunette and Alex is blonde. Alex's dog Frosty is a labrador and likes to play hide and seek." 
- {:ok, result} = Langchain.Chains.DataExtractionChain.run(chat, schema_parameters, data_prompt, verbose: true) + {:ok, result} = LangChain.Chains.DataExtractionChain.run(chat, schema_parameters, data_prompt, verbose: true) assert result == [ %{ diff --git a/test/chains/llm_chain_test.exs b/test/chains/llm_chain_test.exs index 172b603e..4ba4607e 100644 --- a/test/chains/llm_chain_test.exs +++ b/test/chains/llm_chain_test.exs @@ -1,13 +1,13 @@ -defmodule Langchain.Chains.LLMChainTest do - use Langchain.BaseCase +defmodule LangChain.Chains.LLMChainTest do + use LangChain.BaseCase - doctest Langchain.Chains.LLMChain - alias Langchain.ChatModels.ChatOpenAI - alias Langchain.Chains.LLMChain - alias Langchain.PromptTemplate - alias Langchain.Function - alias Langchain.Message - alias Langchain.MessageDelta + doctest LangChain.Chains.LLMChain + alias LangChain.ChatModels.ChatOpenAI + alias LangChain.Chains.LLMChain + alias LangChain.PromptTemplate + alias LangChain.Function + alias LangChain.Message + alias LangChain.MessageDelta setup do {:ok, chat} = ChatOpenAI.new(%{temperature: 0}) @@ -320,7 +320,7 @@ defmodule Langchain.Chains.LLMChainTest do end test "applies list of deltas for function_call with arguments", %{chain: chain} do - deltas = Langchain.Fixtures.deltas_for_function_call("calculator") + deltas = LangChain.Fixtures.deltas_for_function_call("calculator") updated_chain = Enum.reduce(deltas, chain, fn delta, acc -> diff --git a/test/chat_models/chat_open_ai_test.exs b/test/chat_models/chat_open_ai_test.exs index fa05826e..a89da161 100644 --- a/test/chat_models/chat_open_ai_test.exs +++ b/test/chat_models/chat_open_ai_test.exs @@ -1,10 +1,10 @@ -defmodule Langchain.ChatModels.ChatOpenAITest do - use Langchain.BaseCase - import Langchain.Fixtures +defmodule LangChain.ChatModels.ChatOpenAITest do + use LangChain.BaseCase + import LangChain.Fixtures - doctest Langchain.ChatModels.ChatOpenAI - alias Langchain.ChatModels.ChatOpenAI - alias Langchain.Function + 
doctest LangChain.ChatModels.ChatOpenAI + alias LangChain.ChatModels.ChatOpenAI + alias LangChain.Function setup do {:ok, hello_world} = @@ -205,7 +205,7 @@ defmodule Langchain.ChatModels.ChatOpenAITest do end test "handles receiving a delta message for a content message at different parts" do - delta_content = Langchain.Fixtures.raw_deltas_for_content() + delta_content = LangChain.Fixtures.raw_deltas_for_content() msg_1 = Enum.at(delta_content, 0) msg_2 = Enum.at(delta_content, 1) @@ -249,7 +249,7 @@ defmodule Langchain.ChatModels.ChatOpenAITest do end test "handles receiving a delta message for a function_call" do - delta_function = Langchain.Fixtures.raw_deltas_for_function_call() + delta_function = LangChain.Fixtures.raw_deltas_for_function_call() msg_1 = Enum.at(delta_function, 0) msg_2 = Enum.at(delta_function, 1) @@ -334,7 +334,7 @@ defmodule Langchain.ChatModels.ChatOpenAITest do Message.new_user("Answer the following math question: What is 100 + 300 - 200?") _response = - ChatOpenAI.do_api_request(chat, [message], [Langchain.Tools.Calculator.new!()], callback) + ChatOpenAI.do_api_request(chat, [message], [LangChain.Tools.Calculator.new!()], callback) # IO.inspect(response, label: "OPEN AI POST RESPONSE") diff --git a/test/for_open_ai_api_test.exs b/test/for_open_ai_api_test.exs index 6e57ded5..d3013adb 100644 --- a/test/for_open_ai_api_test.exs +++ b/test/for_open_ai_api_test.exs @@ -1,8 +1,8 @@ -defmodule Langchain.ForOpenAIApiTest do +defmodule LangChain.ForOpenAIApiTest do use ExUnit.Case - doctest Langchain.ForOpenAIApi - alias Langchain.ForOpenAIApi - alias Langchain.Message + doctest LangChain.ForOpenAIApi + alias LangChain.ForOpenAIApi + alias LangChain.Message describe "for_api/1" do test "turns a function_call into expected JSON format" do diff --git a/test/function_test.exs b/test/function_test.exs index 4f0c434c..d9cfa313 100644 --- a/test/function_test.exs +++ b/test/function_test.exs @@ -1,10 +1,10 @@ -defmodule Langchain.FunctionTest do 
+defmodule LangChain.FunctionTest do use ExUnit.Case - doctest Langchain.Function + doctest LangChain.Function - alias Langchain.Function - alias Langchain.ForOpenAIApi + alias LangChain.Function + alias LangChain.ForOpenAIApi defp hello_world(_args, _context) do "Hello world!" diff --git a/test/message_delta_test.exs b/test/message_delta_test.exs index d0a8874b..d3871933 100644 --- a/test/message_delta_test.exs +++ b/test/message_delta_test.exs @@ -1,10 +1,10 @@ -defmodule Langchain.MessageDeltaTest do +defmodule LangChain.MessageDeltaTest do use ExUnit.Case - doctest Langchain.MessageDelta - import Langchain.Fixtures - alias Langchain.Message - alias Langchain.MessageDelta - alias Langchain.LangchainError + doctest LangChain.MessageDelta + import LangChain.Fixtures + alias LangChain.Message + alias LangChain.MessageDelta + alias LangChain.LangChainError describe "new/1" do test "works with minimal attrs" do @@ -88,7 +88,7 @@ defmodule Langchain.MessageDeltaTest do end test "raises exception when invalid" do - assert_raise LangchainError, "role: is invalid; index: is invalid", fn -> + assert_raise LangChainError, "role: is invalid; index: is invalid", fn -> MessageDelta.new!(%{role: "invalid", index: "abc"}) end end @@ -103,7 +103,7 @@ defmodule Langchain.MessageDeltaTest do MessageDelta.merge_delta(acc, new_delta) end) - expected = %Langchain.MessageDelta{ + expected = %LangChain.MessageDelta{ content: "Hello! 
How can I assist you today?", index: 0, function_name: nil, @@ -123,7 +123,7 @@ defmodule Langchain.MessageDeltaTest do MessageDelta.merge_delta(acc, new_delta) end) - expected = %Langchain.MessageDelta{ + expected = %LangChain.MessageDelta{ content: nil, index: 0, function_name: "hello_world", @@ -143,7 +143,7 @@ defmodule Langchain.MessageDeltaTest do MessageDelta.merge_delta(acc, new_delta) end) - expected = %Langchain.MessageDelta{ + expected = %LangChain.MessageDelta{ content: nil, index: 0, function_name: "calculator", @@ -163,7 +163,7 @@ defmodule Langchain.MessageDeltaTest do MessageDelta.merge_delta(acc, new_delta) end) - expected = %Langchain.MessageDelta{ + expected = %LangChain.MessageDelta{ content: "Sure, I can help with that. First, let's check which regions are currently available for deployment on Fly.io. Please wait a moment while I fetch this information for you.", index: 0, @@ -180,7 +180,7 @@ defmodule Langchain.MessageDeltaTest do describe "to_message/1" do test "transform a merged and complete MessageDelta to a Message" do # :assistant content type - delta = %Langchain.MessageDelta{ + delta = %LangChain.MessageDelta{ content: "Hello! How can I assist you?", role: :assistant, status: :complete @@ -191,7 +191,7 @@ defmodule Langchain.MessageDeltaTest do assert msg.content == "Hello! How can I assist you?" # :assistant type - delta = %Langchain.MessageDelta{ + delta = %LangChain.MessageDelta{ role: :assistant, function_name: "calculator", arguments: "{\n \"expression\": \"100 + 300 - 200\"\n}", @@ -207,7 +207,7 @@ defmodule Langchain.MessageDeltaTest do end test "does not transform an incomplete MessageDelta to a Message" do - delta = %Langchain.MessageDelta{ + delta = %LangChain.MessageDelta{ content: "Hello! 
How can I assist ", role: :assistant, status: :incomplete @@ -217,7 +217,7 @@ defmodule Langchain.MessageDeltaTest do end test "transforms a delta stopped for length" do - delta = %Langchain.MessageDelta{ + delta = %LangChain.MessageDelta{ content: "Hello! How can I assist ", role: :assistant, status: :length @@ -233,7 +233,7 @@ defmodule Langchain.MessageDeltaTest do # a partially merged delta is invalid. It may have the "complete" flag but # if previous message deltas are missing and were not merged, the # to_message function will fail. - delta = %Langchain.MessageDelta{ + delta = %LangChain.MessageDelta{ role: :assistant, function_name: "calculator", arguments: "{\n \"expression\": \"100 + 300 - 200\"", diff --git a/test/message_test.exs b/test/message_test.exs index b4984fb4..4aed7868 100644 --- a/test/message_test.exs +++ b/test/message_test.exs @@ -1,7 +1,7 @@ -defmodule Langchain.MessageTest do +defmodule LangChain.MessageTest do use ExUnit.Case - doctest Langchain.Message - alias Langchain.Message + doctest LangChain.Message + alias LangChain.Message describe "new/1" do test "works with minimal attrs" do @@ -111,7 +111,7 @@ defmodule Langchain.MessageTest do end test "requires content" do - assert_raise Langchain.LangchainError, "content: can't be blank", fn -> + assert_raise LangChain.LangChainError, "content: can't be blank", fn -> Message.new_system!(nil) end end @@ -137,7 +137,7 @@ defmodule Langchain.MessageTest do end test "requires content" do - assert_raise Langchain.LangchainError, "content: can't be blank", fn -> + assert_raise LangChain.LangChainError, "content: can't be blank", fn -> Message.new_user!(nil) end end diff --git a/test/prompt_template_test.exs b/test/prompt_template_test.exs index 1223534a..c2fd4d3d 100644 --- a/test/prompt_template_test.exs +++ b/test/prompt_template_test.exs @@ -1,9 +1,9 @@ -defmodule Langchain.PromptTemplateTest do +defmodule LangChain.PromptTemplateTest do use ExUnit.Case - doctest Langchain.PromptTemplate - 
alias Langchain.PromptTemplate - alias Langchain.LangchainError - alias Langchain.Message + doctest LangChain.PromptTemplate + alias LangChain.PromptTemplate + alias LangChain.LangChainError + alias LangChain.Message describe "new/1" do test "create with text and no inputs" do @@ -32,7 +32,7 @@ defmodule Langchain.PromptTemplateTest do end test "raise exception with text reason when invalid" do - assert_raise LangchainError, "text: can't be blank", fn -> + assert_raise LangChainError, "text: can't be blank", fn -> PromptTemplate.new!(%{text: ""}) end end @@ -62,7 +62,7 @@ defmodule Langchain.PromptTemplateTest do end test "raises an exception when invalid" do - assert_raise LangchainError, "text: can't be blank", fn -> + assert_raise LangChainError, "text: can't be blank", fn -> PromptTemplate.from_template!("") end end diff --git a/test/support/base_case.ex b/test/support/base_case.ex index 0b5745b8..b9624e18 100644 --- a/test/support/base_case.ex +++ b/test/support/base_case.ex @@ -1,19 +1,19 @@ -defmodule Langchain.BaseCase do +defmodule LangChain.BaseCase do @moduledoc """ This module defines the test case to be used by - tests that use Langchain features like Chat or LLMs. + tests that use LangChain features like Chat or LLMs. """ use ExUnit.CaseTemplate using do quote do - alias Langchain.Message - alias Langchain.MessageDelta + alias LangChain.Message + alias LangChain.MessageDelta # Import conveniences for testing with AI models - import Langchain.BaseCase - import Langchain.Utils.ApiOverride + import LangChain.BaseCase + import LangChain.Utils.ApiOverride end end end diff --git a/test/support/fixtures.ex b/test/support/fixtures.ex index c29de122..2dcefd4e 100644 --- a/test/support/fixtures.ex +++ b/test/support/fixtures.ex @@ -1,10 +1,10 @@ -defmodule Langchain.Fixtures do +defmodule LangChain.Fixtures do @moduledoc """ This module defines test helpers for creating entities. 
""" - alias Langchain.ChatModels.ChatOpenAI - alias Langchain.Message + alias LangChain.ChatModels.ChatOpenAI + alias LangChain.Message def raw_deltas_for_function_call(function_name \\ "hello_world") @@ -372,7 +372,7 @@ defmodule Langchain.Fixtures do # results = Enum.flat_map(delta_content, &ChatOpenAI.do_process_response(&1)) # IO.inspect results [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -380,7 +380,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: "Hello", index: 0, function_name: nil, @@ -388,7 +388,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: "!", index: 0, function_name: nil, @@ -396,7 +396,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " How", index: 0, function_name: nil, @@ -404,7 +404,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " can", index: 0, function_name: nil, @@ -412,7 +412,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " I", index: 0, function_name: nil, @@ -420,7 +420,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " assist", index: 0, function_name: nil, @@ -428,7 +428,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " you", index: 0, function_name: nil, @@ -436,7 +436,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " today", index: 0, function_name: nil, @@ -444,7 
+444,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: "?", index: 0, function_name: nil, @@ -452,7 +452,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -465,7 +465,7 @@ defmodule Langchain.Fixtures do def delta_function_no_args() do [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: "hello_world", @@ -473,7 +473,7 @@ defmodule Langchain.Fixtures do arguments: nil, status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -481,7 +481,7 @@ defmodule Langchain.Fixtures do arguments: "{}", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -494,7 +494,7 @@ defmodule Langchain.Fixtures do def delta_function_streamed_args() do [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: "calculator", @@ -502,7 +502,7 @@ defmodule Langchain.Fixtures do arguments: "", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -510,7 +510,7 @@ defmodule Langchain.Fixtures do arguments: "{\n", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -518,7 +518,7 @@ defmodule Langchain.Fixtures do arguments: " ", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -526,7 +526,7 @@ defmodule Langchain.Fixtures do arguments: " \"", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -534,7 +534,7 @@ defmodule Langchain.Fixtures do arguments: "expression", status: :incomplete }, 
- %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -542,7 +542,7 @@ defmodule Langchain.Fixtures do arguments: "\":", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -550,7 +550,7 @@ defmodule Langchain.Fixtures do arguments: " \"", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -558,7 +558,7 @@ defmodule Langchain.Fixtures do arguments: "100", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -566,7 +566,7 @@ defmodule Langchain.Fixtures do arguments: " +", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -574,7 +574,7 @@ defmodule Langchain.Fixtures do arguments: " ", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -582,7 +582,7 @@ defmodule Langchain.Fixtures do arguments: "300", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -590,7 +590,7 @@ defmodule Langchain.Fixtures do arguments: " -", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -598,7 +598,7 @@ defmodule Langchain.Fixtures do arguments: " ", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -606,7 +606,7 @@ defmodule Langchain.Fixtures do arguments: "200", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -614,7 +614,7 @@ defmodule Langchain.Fixtures do arguments: "\"\n", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: 
nil, @@ -622,7 +622,7 @@ defmodule Langchain.Fixtures do arguments: "}", status: :incomplete }, - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, index: 0, function_name: nil, @@ -640,7 +640,7 @@ defmodule Langchain.Fixtures do # This replicates the data returned in that type of response. [ [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: "", status: :incomplete, index: 0, @@ -650,7 +650,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: "Sure", status: :incomplete, index: 0, @@ -660,7 +660,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: ",", status: :incomplete, index: 0, @@ -670,7 +670,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " I", status: :incomplete, index: 0, @@ -680,7 +680,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " can", status: :incomplete, index: 0, @@ -690,7 +690,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " help", status: :incomplete, index: 0, @@ -700,7 +700,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " with", status: :incomplete, index: 0, @@ -710,7 +710,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " that", status: :incomplete, index: 0, @@ -720,7 +720,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: ".", status: :incomplete, index: 0, @@ -730,7 +730,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " First", status: :incomplete, index: 0, @@ -740,7 +740,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: ",", status: 
:incomplete, index: 0, @@ -750,7 +750,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " let", status: :incomplete, index: 0, @@ -760,7 +760,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: "'s", status: :incomplete, index: 0, @@ -770,7 +770,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " check", status: :incomplete, index: 0, @@ -780,7 +780,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " which", status: :incomplete, index: 0, @@ -790,7 +790,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " regions", status: :incomplete, index: 0, @@ -800,7 +800,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " are", status: :incomplete, index: 0, @@ -810,7 +810,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " currently", status: :incomplete, index: 0, @@ -820,7 +820,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " available", status: :incomplete, index: 0, @@ -830,7 +830,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " for", status: :incomplete, index: 0, @@ -840,7 +840,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " deployment", status: :incomplete, index: 0, @@ -850,7 +850,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " on", status: :incomplete, index: 0, @@ -860,7 +860,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " Fly", status: :incomplete, index: 0, @@ -870,7 +870,7 @@ 
defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: ".io", status: :incomplete, index: 0, @@ -880,7 +880,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: ".", status: :incomplete, index: 0, @@ -890,7 +890,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " Please", status: :incomplete, index: 0, @@ -900,7 +900,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " wait", status: :incomplete, index: 0, @@ -910,7 +910,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " a", status: :incomplete, index: 0, @@ -920,7 +920,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " moment", status: :incomplete, index: 0, @@ -930,7 +930,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " while", status: :incomplete, index: 0, @@ -940,7 +940,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " I", status: :incomplete, index: 0, @@ -950,7 +950,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " fetch", status: :incomplete, index: 0, @@ -960,7 +960,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " this", status: :incomplete, index: 0, @@ -970,7 +970,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " information", status: :incomplete, index: 0, @@ -980,7 +980,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " for", status: :incomplete, index: 0, @@ -990,7 +990,7 @@ defmodule Langchain.Fixtures do } ], [ - 
%Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: " you", status: :incomplete, index: 0, @@ -1000,7 +1000,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: ".", status: :incomplete, index: 0, @@ -1010,7 +1010,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, status: :incomplete, index: 0, @@ -1020,7 +1020,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, status: :incomplete, index: 0, @@ -1030,7 +1030,7 @@ defmodule Langchain.Fixtures do } ], [ - %Langchain.MessageDelta{ + %LangChain.MessageDelta{ content: nil, status: :complete, index: 0, diff --git a/test/tools/calculator_test.exs b/test/tools/calculator_test.exs index 6cab526e..b8f03520 100644 --- a/test/tools/calculator_test.exs +++ b/test/tools/calculator_test.exs @@ -1,11 +1,11 @@ -defmodule Langchain.Tools.CalculatorTest do - alias Langchain.Chains.LLMChain - use Langchain.BaseCase +defmodule LangChain.Tools.CalculatorTest do + alias LangChain.Chains.LLMChain + use LangChain.BaseCase - doctest Langchain.Tools.Calculator - alias Langchain.Tools.Calculator - alias Langchain.Function - alias Langchain.ChatModels.ChatOpenAI + doctest LangChain.Tools.Calculator + alias LangChain.Tools.Calculator + alias LangChain.Function + alias LangChain.ChatModels.ChatOpenAI describe "new/0" do test "defines the function correctly" do diff --git a/test/utils_test.exs b/test/utils_test.exs index b3de9f6f..1866a718 100644 --- a/test/utils_test.exs +++ b/test/utils_test.exs @@ -1,8 +1,8 @@ -defmodule Langchain.UtilsTest do +defmodule LangChain.UtilsTest do use ExUnit.Case - doctest Langchain.Utils - alias Langchain.Utils + doctest LangChain.Utils + alias LangChain.Utils defmodule FakeSchema do use Ecto.Schema @@ -60,26 +60,26 @@ defmodule Langchain.UtilsTest do end test "handles ecto enum type errors" do - {:error, changeset} = 
Langchain.MessageDelta.new(%{role: "invalid"}) + {:error, changeset} = LangChain.MessageDelta.new(%{role: "invalid"}) result = Utils.changeset_error_to_string(changeset) assert result == "role: is invalid" end test "handles multiple errors on a field" do - {:error, changeset} = Langchain.MessageDelta.new(%{role: "invalid"}) + {:error, changeset} = LangChain.MessageDelta.new(%{role: "invalid"}) changeset = Ecto.Changeset.add_error(changeset, :role, "is required") result = Utils.changeset_error_to_string(changeset) assert result == "role: is required, is invalid" end test "handles errors on multiple fields" do - {:error, changeset} = Langchain.MessageDelta.new(%{role: "invalid", index: "abc"}) + {:error, changeset} = LangChain.MessageDelta.new(%{role: "invalid", index: "abc"}) result = Utils.changeset_error_to_string(changeset) assert result == "role: is invalid; index: is invalid" end test "handles multiple errors on multiple fields" do - {:error, changeset} = Langchain.MessageDelta.new(%{role: "invalid", index: "abc"}) + {:error, changeset} = LangChain.MessageDelta.new(%{role: "invalid", index: "abc"}) changeset = changeset |> Ecto.Changeset.add_error(:index, "is numeric")