Commit 97b1349

typing fixes

1 parent 83a7232 · commit 97b1349

2 files changed: +8 -5 lines changed


vocode/streaming/agent/anthropic_utils.py (+1 -1)

@@ -29,7 +29,7 @@ def format_anthropic_chat_messages_from_transcript(
 # TODO: reliably count tokens of Anthropic messages so that we don't exceed the context window

 def merge_bot_messages_for_langchain(messages: list[tuple]) -> list[tuple]:
-    merged_messages = []
+    merged_messages: list[tuple] = []
     for role, message in messages:
         if role == "ai" and merged_messages and merged_messages[-1][0] == "ai":
             merged_messages[-1] = ("ai", merged_messages[-1][1] + message)
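The only change in this hunk is the annotation on merged_messages; the merge logic is untouched. A minimal runnable sketch of how the function reads with the fix, assuming the lines outside the hunk append non-mergeable messages and return the list (those trailing lines are hypothetical here, not shown in the diff):

# Sketch (not part of the commit): merging consecutive bot messages, with the
# annotation added by this commit. The else branch and return are assumed.
def merge_bot_messages_for_langchain(messages: list[tuple]) -> list[tuple]:
    merged_messages: list[tuple] = []  # annotation added by this commit
    for role, message in messages:
        if role == "ai" and merged_messages and merged_messages[-1][0] == "ai":
            # Consecutive "ai" messages are collapsed into a single tuple.
            merged_messages[-1] = ("ai", merged_messages[-1][1] + message)
        else:
            merged_messages.append((role, message))  # assumed: outside the hunk
    return merged_messages  # assumed: outside the hunk


# Example: two back-to-back "ai" messages become one entry.
print(merge_bot_messages_for_langchain(
    [("human", "Hi"), ("ai", "Hello"), ("ai", " there"), ("human", "Bye")]
))
# [('human', 'Hi'), ('ai', 'Hello there'), ('human', 'Bye')]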

vocode/streaming/agent/langchain_agent.py (+7 -4)

@@ -3,7 +3,7 @@
 import sentry_sdk
 from loguru import logger

-from langchain_core.messages.ai import AIMessageChunk
+from langchain_core.messages.base import BaseMessage as LangchainBaseMessage
 from langchain_core.prompts import ChatPromptTemplate
 from langchain.chat_models import init_chat_model

@@ -44,10 +44,13 @@ def __init__(

     async def token_generator(
         self,
-        gen: AsyncIterator[AIMessageChunk],
-    ) -> AsyncGenerator[str | FunctionFragment, None]:
+        gen: AsyncIterator[LangchainBaseMessage],
+    ) -> AsyncGenerator[str, None]:
         async for chunk in gen:
-            yield chunk.content
+            if isinstance(chunk.content, str):
+                yield chunk.content
+            else:
+                raise ValueError(f"Received unexpected message type {type(chunk)} from Langchain. Expected str.")

     def format_langchain_messages_from_transcript(self) -> list[tuple]:
         if not self.transcript:
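The generator now accepts any LangChain BaseMessage rather than only AIMessageChunk, and because a message's content may be a plain string or a list of content blocks, the added isinstance check narrows it to str before yielding (the FunctionFragment alternative is dropped from the return type). A minimal standalone sketch of that narrowing pattern, using a hypothetical FakeMessage stand-in so it runs without langchain installed:

# Sketch (not part of the commit): the str-narrowing pattern from token_generator,
# with FakeMessage standing in for langchain_core's BaseMessage.
import asyncio
from dataclasses import dataclass
from typing import AsyncGenerator, AsyncIterator, Union


@dataclass
class FakeMessage:
    # A real message's content may be a plain string or a list of content blocks.
    content: Union[str, list]


async def token_generator(
    gen: AsyncIterator[FakeMessage],
) -> AsyncGenerator[str, None]:
    async for chunk in gen:
        if isinstance(chunk.content, str):
            yield chunk.content
        else:
            # As in the commit, non-string content raises instead of being yielded.
            raise ValueError(f"Unexpected content type {type(chunk.content)}; expected str.")


async def main() -> None:
    async def stream() -> AsyncIterator[FakeMessage]:
        for piece in ("Hel", "lo", "!"):
            yield FakeMessage(content=piece)

    print([token async for token in token_generator(stream())])  # ['Hel', 'lo', '!']


asyncio.run(main())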
