Commit 97b1349 (1 parent: 83a7232) — Copy full SHA for 97b1349
File tree: 2 files changed, +8 −5 lines changed
lines changed Original file line number Diff line number Diff line change @@ -29,7 +29,7 @@ def format_anthropic_chat_messages_from_transcript(
29
29
# TODO: reliably count tokens of Anthropic messages so that we don't exceed the context window
30
30
31
31
def merge_bot_messages_for_langchain (messages : list [tuple ]) -> list [tuple ]:
32
- merged_messages = []
32
+ merged_messages : list [ tuple ] = []
33
33
for role , message in messages :
34
34
if role == "ai" and merged_messages and merged_messages [- 1 ][0 ] == "ai" :
35
35
merged_messages [- 1 ] = ("ai" , merged_messages [- 1 ][1 ] + message )
Original file line number Diff line number Diff line change 3
3
import sentry_sdk
4
4
from loguru import logger
5
5
6
- from langchain_core .messages .ai import AIMessageChunk
6
+ from langchain_core .messages .base import BaseMessage as LangchainBaseMessage
7
7
from langchain_core .prompts import ChatPromptTemplate
8
8
from langchain .chat_models import init_chat_model
9
9
@@ -44,10 +44,13 @@ def __init__(
44
44
45
45
async def token_generator (
46
46
self ,
47
- gen : AsyncIterator [AIMessageChunk ],
48
- ) -> AsyncGenerator [str | FunctionFragment , None ]:
47
+ gen : AsyncIterator [LangchainBaseMessage ],
48
+ ) -> AsyncGenerator [str , None ]:
49
49
async for chunk in gen :
50
- yield chunk .content
50
+ if isinstance (chunk .content , str ):
51
+ yield chunk .content
52
+ else :
53
+ raise ValueError (f"Received unexpected message type { type (chunk )} from Langchain. Expected str." )
51
54
52
55
def format_langchain_messages_from_transcript (self ) -> list [tuple ]:
53
56
if not self .transcript :
You can’t perform that action at this time.
0 commit comments