From c9081ffc72ad779d13b5b59b4baff8d5ef99befa Mon Sep 17 00:00:00 2001
From: Jinkyou Son <nallwhy@gmail.com>
Date: Thu, 16 Jan 2025 09:03:35 +0900
Subject: [PATCH 1/2] Add raw field to TokenUsage

---
 lib/token_usage.ex        | 8 ++++++--
 test/token_usage_test.exs | 4 +++-
 2 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/lib/token_usage.ex b/lib/token_usage.ex
index 472dbb96..0fb7cf1a 100644
--- a/lib/token_usage.ex
+++ b/lib/token_usage.ex
@@ -6,7 +6,10 @@ defmodule LangChain.TokenUsage do
 
       %TokenUsage{
         input: 30,
-        output: 15
+        output: 15,
+        raw: %{
+          "total_tokens" => 29
+        }
       }
 
   Input is the tokens from the prompt. Output is the completion or generated
@@ -23,11 +26,12 @@ defmodule LangChain.TokenUsage do
   embedded_schema do
     field :input, :integer
     field :output, :integer
+    field :raw, :map, default: %{}
   end
 
   @type t :: %TokenUsage{}
 
-  @create_fields [:input, :output]
+  @create_fields [:input, :output, :raw]
   # Anthropic returns only the output token count when streaming deltas
   @required_fields [:output]
 
diff --git a/test/token_usage_test.exs b/test/token_usage_test.exs
index 4b133868..8269cb37 100644
--- a/test/token_usage_test.exs
+++ b/test/token_usage_test.exs
@@ -7,10 +7,12 @@ defmodule LangChain.TokenUsageTest do
 
   describe "new/1" do
     test "accepts valid data" do
-      assert {:ok, %TokenUsage{} = usage} = TokenUsage.new(%{"input" => 1, "output" => 2})
+      assert {:ok, %TokenUsage{} = usage} =
+               TokenUsage.new(%{"input" => 1, "output" => 2, "raw" => %{"total_tokens" => 29}})
 
       assert usage.input == 1
       assert usage.output == 2
+      assert usage.raw == %{"total_tokens" => 29}
     end
 
     test "returns error when invalid" do

From 0784df48aaf4ad4ad24734d60a03d68305040a22 Mon Sep 17 00:00:00 2001
From: Jinkyou Son <nallwhy@gmail.com>
Date: Thu, 16 Jan 2025 09:05:46 +0900
Subject: [PATCH 2/2] Fill raw field of TokenUsage in chat models

---
 lib/chat_models/chat_anthropic.ex | 3 ++-
 lib/chat_models/chat_bumblebee.ex | 4 ++--
 lib/chat_models/chat_google_ai.ex | 3 ++-
 lib/chat_models/chat_open_ai.ex   | 3 ++-
 4 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/lib/chat_models/chat_anthropic.ex b/lib/chat_models/chat_anthropic.ex
index 40c1ff17..7398e9f7 100644
--- a/lib/chat_models/chat_anthropic.ex
+++ b/lib/chat_models/chat_anthropic.ex
@@ -1021,7 +1021,8 @@ defmodule LangChain.ChatModels.ChatAnthropic do
     #  https://platform.openai.com/docs/api-reference/chat/object#chat/object-usage
     TokenUsage.new!(%{
       input: Map.get(usage, "input_tokens"),
-      output: Map.get(usage, "output_tokens")
+      output: Map.get(usage, "output_tokens"),
+      raw: usage
     })
   end
 
diff --git a/lib/chat_models/chat_bumblebee.ex b/lib/chat_models/chat_bumblebee.ex
index c7d3185f..a9196176 100644
--- a/lib/chat_models/chat_bumblebee.ex
+++ b/lib/chat_models/chat_bumblebee.ex
@@ -322,9 +322,9 @@ defmodule LangChain.ChatModels.ChatBumblebee do
     [result]
   end
 
-  defp fire_token_usage_callback(model, %{input: input, output: output} = _token_summary) do
+  defp fire_token_usage_callback(model, %{input: input, output: output} = token_summary) do
     Callbacks.fire(model.callbacks, :on_llm_token_usage, [
-      TokenUsage.new!(%{input: input, output: output})
+      TokenUsage.new!(%{input: input, output: output, raw: token_summary})
     ])
   end
 
diff --git a/lib/chat_models/chat_google_ai.ex b/lib/chat_models/chat_google_ai.ex
index 5caeeb9a..257c580c 100644
--- a/lib/chat_models/chat_google_ai.ex
+++ b/lib/chat_models/chat_google_ai.ex
@@ -739,7 +739,8 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
     # extract out the reported response token usage
     TokenUsage.new!(%{
       input: Map.get(usage, "promptTokenCount", 0),
-      output: Map.get(usage, "candidatesTokenCount", 0)
+      output: Map.get(usage, "candidatesTokenCount", 0),
+      raw: usage
     })
   end
 
diff --git a/lib/chat_models/chat_open_ai.ex b/lib/chat_models/chat_open_ai.ex
index 2435f847..6f4cc113 100644
--- a/lib/chat_models/chat_open_ai.ex
+++ b/lib/chat_models/chat_open_ai.ex
@@ -1046,7 +1046,8 @@ defmodule LangChain.ChatModels.ChatOpenAI do
     #  https://platform.openai.com/docs/api-reference/chat/object#chat/object-usage
     TokenUsage.new!(%{
       input: Map.get(usage, "prompt_tokens"),
-      output: Map.get(usage, "completion_tokens")
+      output: Map.get(usage, "completion_tokens"),
+      raw: usage
     })
   end