Skip to content

Commit

Permalink
Add raw field to TokenUsage (#236)
Browse files — browse the repository at this point in the history
* Add raw field to TokenUsage

* Fill raw of TokenUsage in chat models
  • Loading branch information
nallwhy authored Jan 22, 2025
1 parent c2b22c4 commit f3f6a66
Show file tree
Hide file tree
Showing 6 changed files with 17 additions and 8 deletions.
3 changes: 2 additions & 1 deletion lib/chat_models/chat_anthropic.ex
Original file line number Diff line number Diff line change
Expand Up @@ -1034,7 +1034,8 @@ defmodule LangChain.ChatModels.ChatAnthropic do
# "cache_creation_input_tokens" and "cache_read_input_tokens"
TokenUsage.new!(%{
input: Map.get(usage, "input_tokens"),
output: Map.get(usage, "output_tokens")
output: Map.get(usage, "output_tokens"),
raw: usage
})
end

Expand Down
4 changes: 2 additions & 2 deletions lib/chat_models/chat_bumblebee.ex
Original file line number Diff line number Diff line change
Expand Up @@ -322,9 +322,9 @@ defmodule LangChain.ChatModels.ChatBumblebee do
[result]
end

defp fire_token_usage_callback(model, %{input: input, output: output} = _token_summary) do
defp fire_token_usage_callback(model, %{input: input, output: output} = token_summary) do
Callbacks.fire(model.callbacks, :on_llm_token_usage, [
TokenUsage.new!(%{input: input, output: output})
TokenUsage.new!(%{input: input, output: output, raw: token_summary})
])
end

Expand Down
3 changes: 2 additions & 1 deletion lib/chat_models/chat_google_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -739,7 +739,8 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
# extract out the reported response token usage
TokenUsage.new!(%{
input: Map.get(usage, "promptTokenCount", 0),
output: Map.get(usage, "candidatesTokenCount", 0)
output: Map.get(usage, "candidatesTokenCount", 0),
raw: usage
})
end

Expand Down
3 changes: 2 additions & 1 deletion lib/chat_models/chat_open_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -1046,7 +1046,8 @@ defmodule LangChain.ChatModels.ChatOpenAI do
# https://platform.openai.com/docs/api-reference/chat/object#chat/object-usage
TokenUsage.new!(%{
input: Map.get(usage, "prompt_tokens"),
output: Map.get(usage, "completion_tokens")
output: Map.get(usage, "completion_tokens"),
raw: usage
})
end

Expand Down
8 changes: 6 additions & 2 deletions lib/token_usage.ex
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,10 @@ defmodule LangChain.TokenUsage do
%TokenUsage{
input: 30,
output: 15
output: 15,
raw: %{
"total_tokens" => 29
}
}
Input is the tokens from the prompt. Output is the completion or generated
Expand All @@ -23,11 +26,12 @@ defmodule LangChain.TokenUsage do
embedded_schema do
field :input, :integer
field :output, :integer
field :raw, :map, default: %{}
end

@type t :: %TokenUsage{}

@create_fields [:input, :output]
@create_fields [:input, :output, :raw]
# Anthropic returns only the output token count when streaming deltas
@required_fields [:output]

Expand Down
4 changes: 3 additions & 1 deletion test/token_usage_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,12 @@ defmodule LangChain.TokenUsageTest do

describe "new/1" do
test "accepts valid data" do
assert {:ok, %TokenUsage{} = usage} = TokenUsage.new(%{"input" => 1, "output" => 2})
assert {:ok, %TokenUsage{} = usage} =
TokenUsage.new(%{"input" => 1, "output" => 2, "raw" => %{"total_tokens" => 29}})

assert usage.input == 1
assert usage.output == 2
assert usage.raw == %{"total_tokens" => 29}
end

test "returns error when invalid" do
Expand Down

0 comments on commit f3f6a66

Please sign in to comment.