Merge pull request #263 from marcklingen/feat/track-usage-langfuse
feat: track token usage in Langfuse (and fix memory leak)
tjbck committed Sep 19, 2024
2 parents de253bf + 234571e commit 2671d7e
Showing 1 changed file with 18 additions and 5 deletions.
23 changes: 18 additions & 5 deletions examples/filters/langfuse_filter_pipeline.py
@@ -2,7 +2,7 @@
title: Langfuse Filter Pipeline
author: open-webui
date: 2024-05-30
version: 1.2
version: 1.3
license: MIT
description: A filter pipeline that uses Langfuse.
requirements: langfuse
@@ -113,13 +113,26 @@ async def outlet(self, body: dict, user: Optional[dict] = None) -> dict:
            return body

        generation = self.chat_generations[body["chat_id"]]
        assistant_message = get_last_assistant_message(body["messages"])

        # Extract usage information
        info = assistant_message.get("info", {})
        usage = None
        if "prompt_tokens" in info and "completion_tokens" in info:
            usage = {
                "input": info["prompt_tokens"],
                "output": info["completion_tokens"],
                "unit": "TOKENS",
            }

        user_message = get_last_user_message(body["messages"])
        generated_message = get_last_assistant_message(body["messages"])

        # Update generation
        generation.end(
            output=generated_message,
            output=assistant_message,
            metadata={"interface": "open-webui"},
            usage=usage,
        )

        # Clean up the chat_generations dictionary
        del self.chat_generations[body["chat_id"]]

        return body
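
For reference, a minimal standalone sketch of the usage-extraction logic this commit adds. The sample body and the simplified get_last_assistant_message stand-in below are assumptions for illustration only; the real helper and request shape come from Open WebUI.

# Minimal sketch of the usage extraction added in this commit.
# The sample body and helper below are hypothetical stand-ins.
from typing import Optional


def get_last_assistant_message(messages: list) -> Optional[dict]:
    # Simplified stand-in for the pipeline's helper: return the most
    # recent assistant message dict, or None if there is none.
    for message in reversed(messages):
        if message.get("role") == "assistant":
            return message
    return None


body = {
    "chat_id": "demo-chat",
    "messages": [
        {"role": "user", "content": "Hello!"},
        {
            "role": "assistant",
            "content": "Hi there!",
            "info": {"prompt_tokens": 12, "completion_tokens": 5},
        },
    ],
}

assistant_message = get_last_assistant_message(body["messages"])

# Same logic as the diff: build a Langfuse-style usage dict when token
# counts are present, otherwise leave usage as None.
info = assistant_message.get("info", {})
usage = None
if "prompt_tokens" in info and "completion_tokens" in info:
    usage = {
        "input": info["prompt_tokens"],
        "output": info["completion_tokens"],
        "unit": "TOKENS",
    }

print(usage)
# {'input': 12, 'output': 5, 'unit': 'TOKENS'}

The resulting dict is what gets passed as the usage argument to generation.end(...), so token counts reported by the model show up on the generation in Langfuse; when the counts are absent, usage stays None and the generation is recorded without usage data.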
