Commit
Merge pull request #1569 from better629/main
update aws claude token usage
geekan authored Nov 4, 2024
2 parents 59715c0 + fb817a7 commit 99c1f95
Showing 1 changed file with 7 additions and 1 deletion.
8 changes: 7 additions & 1 deletion metagpt/utils/token_counter.py
@@ -67,6 +67,8 @@
     "claude-2.0": {"prompt": 0.008, "completion": 0.024},
     "claude-2.1": {"prompt": 0.008, "completion": 0.024},
     "claude-3-sonnet-20240229": {"prompt": 0.003, "completion": 0.015},
+    "claude-3-5-sonnet": {"prompt": 0.003, "completion": 0.015},
+    "claude-3-5-sonnet-v2": {"prompt": 0.003, "completion": 0.015},  # alias of newer 3.5 sonnet
     "claude-3-5-sonnet-20240620": {"prompt": 0.003, "completion": 0.015},
     "claude-3-opus-20240229": {"prompt": 0.015, "completion": 0.075},
     "claude-3-haiku-20240307": {"prompt": 0.00025, "completion": 0.00125},
@@ -379,8 +381,12 @@
 def count_input_tokens(messages, model="gpt-3.5-turbo-0125"):
     """Return the number of tokens used by a list of messages."""
     if "claude" in model:
+        # rough estimation for models newer than claude-2.1
         vo = anthropic.Client()
-        num_tokens = vo.count_tokens(str(messages))
+        num_tokens = 0
+        for message in messages:
+            for key, value in message.items():
+                num_tokens += vo.count_tokens(str(value))
         return num_tokens
     try:
         encoding = tiktoken.encoding_for_model(model)
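
For illustration, a minimal sketch of how the updated Claude branch could be exercised. The helper name `count_claude_input_tokens` and the sample messages are hypothetical, and the sketch assumes an anthropic SDK version that still exposes `Client.count_tokens` (it may be unavailable in newer releases) plus an `ANTHROPIC_API_KEY` in the environment:

```python
# Illustrative usage of the Claude branch above; assumes an anthropic SDK version
# that still provides Client.count_tokens and ANTHROPIC_API_KEY set in the environment.
import anthropic


def count_claude_input_tokens(messages) -> int:
    """Hypothetical mirror of the updated logic: sum token counts over every message field."""
    vo = anthropic.Client()
    num_tokens = 0
    for message in messages:
        for _, value in message.items():  # counts both the role and content strings
            num_tokens += vo.count_tokens(str(value))
    return num_tokens


messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize the token pricing change."},
]
print(count_claude_input_tokens(messages))  # rough client-side estimate
```

Compared with the previous `str(messages)` approach, iterating over the fields avoids counting Python's dict punctuation and keys, though the result is still a rough client-side estimate rather than an exact server-side count.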
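
And a small worked example of how the new pricing entries would typically translate token counts into cost, assuming the same USD-per-1K-tokens convention as the surrounding TOKEN_COSTS entries; `estimate_claude_cost` is an illustrative helper, not part of this commit:

```python
# Illustrative only: apply the new claude-3-5-sonnet rates to a token count,
# assuming TOKEN_COSTS values are USD per 1K tokens (as the other entries suggest).
TOKEN_COSTS = {
    "claude-3-5-sonnet": {"prompt": 0.003, "completion": 0.015},
    "claude-3-5-sonnet-v2": {"prompt": 0.003, "completion": 0.015},
}


def estimate_claude_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    """Hypothetical helper: turn token counts into an estimated USD cost."""
    rates = TOKEN_COSTS[model]
    return (prompt_tokens * rates["prompt"] + completion_tokens * rates["completion"]) / 1000


# 1,200 prompt tokens and 300 completion tokens:
# (1200 * 0.003 + 300 * 0.015) / 1000 = (3.6 + 4.5) / 1000 = 0.0081 USD
print(estimate_claude_cost("claude-3-5-sonnet", 1200, 300))  # ~0.0081
```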

0 comments on commit 99c1f95
