Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Add voice provider integration #2027

Open
wants to merge 14 commits into
base: main
Choose a base branch
from
2 changes: 1 addition & 1 deletion letta/agent_store/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@

# from letta.schemas.message import Message, Passage, Record, RecordType, ToolCall
from letta.schemas.message import Message
from letta.schemas.openai.chat_completions import ToolCall
from letta.schemas.openai.chat_completion_request import ToolCall
from letta.schemas.passage import Passage
from letta.settings import settings

Expand Down
2 changes: 1 addition & 1 deletion letta/client/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
RecallMemorySummary,
)
from letta.schemas.message import Message, MessageCreate, UpdateMessage
from letta.schemas.openai.chat_completions import ToolCall
from letta.schemas.openai.chat_completion_request import ToolCall
from letta.schemas.organization import Organization
from letta.schemas.passage import Passage
from letta.schemas.sandbox_config import (
Expand Down
2 changes: 1 addition & 1 deletion letta/llm_api/azure_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@

from letta.llm_api.helpers import make_post_request
from letta.schemas.llm_config import LLMConfig
from letta.schemas.openai.chat_completion_request import ChatCompletionRequest
from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
from letta.schemas.openai.chat_completions import ChatCompletionRequest
from letta.schemas.openai.embedding_response import EmbeddingResponse
from letta.settings import ModelSettings

Expand Down
1 change: 1 addition & 0 deletions letta/llm_api/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,7 @@ def _sse_post(url: str, data: dict, headers: dict) -> Generator[ChatCompletionCh
chunk_data = json.loads(sse.data)
# print("chunk_data::", chunk_data)
chunk_object = ChatCompletionChunkResponse(**chunk_data)
chunk_object.created = chunk_object.created.timestamp()
# print("chunk_object::", chunk_object)
# id=chunk_data["id"],
# choices=[ChunkChoice],
Expand Down
4 changes: 2 additions & 2 deletions letta/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from letta.schemas.job import Job
from letta.schemas.llm_config import LLMConfig
from letta.schemas.memory import Memory
from letta.schemas.openai.chat_completions import ToolCall, ToolCallFunction
from letta.schemas.openai.chat_completion_request import ToolCall, ToolCallFunction
from letta.schemas.tool_rule import (
BaseToolRule,
InitToolRule,
Expand Down Expand Up @@ -487,6 +487,6 @@ def update_job(self, job: Job) -> Job:
def update_job_status(self, job_id: str, status: JobStatus):
    """Set the status of the job row identified by ``job_id``.

    Args:
        job_id: Primary key of the ``JobModel`` row to update.
        status: New :class:`JobStatus` value to persist.

    When the job transitions to ``JobStatus.completed``, ``completed_at`` is
    also stamped with the current UTC time.
    """
    with self.session_maker() as session:
        session.query(JobModel).filter(JobModel.id == job_id).update({"status": status})
        # Note: enum members on JobStatus are lowercase (``completed``), matching
        # the comparison used elsewhere in this module.
        if status == JobStatus.completed:
            session.query(JobModel).filter(JobModel.id == job_id).update({"completed_at": get_utc_time()})
        session.commit()
61 changes: 53 additions & 8 deletions letta/schemas/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import json
import warnings
from datetime import datetime, timezone
from typing import List, Literal, Optional
from typing import List, Literal, Optional, Union

from pydantic import Field, field_validator

Expand All @@ -14,17 +14,25 @@
from letta.local_llm.constants import INNER_THOUGHTS_KWARG
from letta.schemas.enums import MessageRole
from letta.schemas.letta_base import LettaBase
from letta.schemas.letta_message import AssistantMessage as LettaAssistantMessage
from letta.schemas.letta_message import (
AssistantMessage,
FunctionCall,
FunctionCallMessage,
FunctionReturn,
InternalMonologue,
LettaMessage,
)
from letta.schemas.letta_message import SystemMessage as LettaSystemMessage
from letta.schemas.letta_message import UserMessage as LettaUserMessage
from letta.schemas.openai.chat_completion_request import (
AssistantMessage,
ChatCompletionRequest,
SystemMessage,
ToolCall,
ToolCallFunction,
ToolMessage,
UserMessage,
)
from letta.schemas.openai.chat_completions import ToolCall, ToolCallFunction
from letta.utils import get_utc_time, is_utc_datetime, json_dumps


Expand Down Expand Up @@ -120,6 +128,43 @@ def validate_role(cls, v: str) -> str:
assert v in roles, f"Role must be one of {roles}"
return v

@classmethod
def from_chat_completions_message(
    cls,
    chat_completions_msg: Union[SystemMessage, UserMessage, AssistantMessage, ToolMessage],
    completion_request: ChatCompletionRequest,
    user_id: str,
) -> "Message":
    """Convert an OpenAI chat-completions request message into a Letta ``Message``.

    Args:
        chat_completions_msg: One of the OpenAI-style request message models
            (system / user / assistant / tool).
        completion_request: The enclosing request; its ``user`` and ``model``
            fields are copied onto the resulting message.
        user_id: Letta user id to stamp on the resulting message.

    Returns:
        A ``Message`` with role, text, and role-specific fields populated.

    Raises:
        ValueError: If ``chat_completions_msg`` is not one of the supported types.
    """
    msg = chat_completions_msg
    if isinstance(msg, SystemMessage):
        converted = Message(role=MessageRole.system, text=msg.content, name=msg.name)
    elif isinstance(msg, UserMessage):
        # UserMessage.content may be a plain string or a list of strings;
        # a list is flattened into newline-separated text.
        text = msg.content if isinstance(msg.content, str) else "\n".join(msg.content)
        converted = Message(role=MessageRole.user, text=text, name=msg.name)
    elif isinstance(msg, AssistantMessage):
        converted = Message(
            role=MessageRole.assistant,
            text=msg.content,
            name=msg.name,
            tool_calls=msg.tool_calls,
        )
    elif isinstance(msg, ToolMessage):
        converted = Message(role=MessageRole.tool, text=msg.content, tool_call_id=msg.tool_call_id)
    else:
        raise ValueError(f"Unsupported message type: {type(chat_completions_msg)}")

    # Stamp request-level metadata onto the converted message.
    # NOTE(review): agent_id is taken from the request's ``user`` field —
    # presumably the caller encodes the agent id there; confirm against callers.
    converted.user_id = user_id
    converted.agent_id = completion_request.user
    converted.model = completion_request.model
    return converted

def to_json(self):
json_message = vars(self)
if json_message["tool_calls"] is not None:
Expand Down Expand Up @@ -164,7 +209,7 @@ def to_letta_message(
except KeyError:
raise ValueError(f"Function call {tool_call.function.name} missing {DEFAULT_MESSAGE_TOOL_KWARG} argument")
messages.append(
AssistantMessage(
LettaAssistantMessage(
id=self.id,
date=self.created_at,
assistant_message=message_string,
Expand Down Expand Up @@ -217,20 +262,20 @@ def to_letta_message(
)
)
elif self.role == MessageRole.user:
# This is type UserMessage
# This is type LettaUserMessage
assert self.text is not None, self
messages.append(
UserMessage(
LettaUserMessage(
id=self.id,
date=self.created_at,
message=self.text,
)
)
elif self.role == MessageRole.system:
# This is type SystemMessage
# This is type LettaSystemMessage
assert self.text is not None, self
messages.append(
SystemMessage(
LettaSystemMessage(
id=self.id,
date=self.created_at,
message=self.text,
Expand Down
30 changes: 17 additions & 13 deletions letta/schemas/openai/chat_completion_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,10 @@

from pydantic import BaseModel, Field


class SystemMessage(BaseModel):
content: str
role: str = "system"
name: Optional[str] = None


class UserMessage(BaseModel):
content: Union[str, List[str]]
role: str = "user"
name: Optional[str] = None
from letta.schemas.enums import MessageRole


# Tool call types
class ToolCallFunction(BaseModel):
    """The function invocation carried inside an OpenAI-style tool call."""

    # Name of the function the model wants to invoke.
    name: str
    # Raw argument string as produced by the model — per the OpenAI
    # tool-call schema this is typically JSON-encoded, but it is not
    # validated here.
    arguments: str
Expand All @@ -26,16 +17,29 @@ class ToolCall(BaseModel):
function: ToolCallFunction


# Message types
class SystemMessage(BaseModel):
    """OpenAI-style system message for a chat-completions request."""

    # The system prompt text.
    content: str
    # NOTE(review): default is an enum member on a str-typed field — assumes
    # MessageRole subclasses str so it validates/serializes as "system"; confirm.
    role: str = MessageRole.system
    # Optional participant name, per the OpenAI chat schema.
    name: Optional[str] = None


class UserMessage(BaseModel):
    """OpenAI-style user message for a chat-completions request."""

    # Either a single string or a list of string segments.
    content: Union[str, List[str]]
    # NOTE(review): enum-member default on a str field — assumes MessageRole
    # is a str-subclass enum; confirm.
    role: str = MessageRole.user
    # Optional participant name, per the OpenAI chat schema.
    name: Optional[str] = None


class AssistantMessage(BaseModel):
    """OpenAI-style assistant message for a chat-completions request."""

    # Assistant text; may be None when the message only carries tool calls.
    content: Optional[str] = None
    # NOTE(review): enum-member default on a str field — assumes MessageRole
    # is a str-subclass enum; confirm.
    role: str = MessageRole.assistant
    # Optional participant name, per the OpenAI chat schema.
    name: Optional[str] = None
    # Tool calls the assistant requested, if any.
    tool_calls: Optional[List[ToolCall]] = None


class ToolMessage(BaseModel):
    """OpenAI-style tool-result message for a chat-completions request."""

    # The tool's output, returned to the model as text.
    content: str
    # NOTE(review): enum-member default on a str field — assumes MessageRole
    # is a str-subclass enum; confirm.
    role: str = MessageRole.tool
    # Id of the tool call this message responds to.
    tool_call_id: str


Expand Down
8 changes: 7 additions & 1 deletion letta/schemas/openai/chat_completion_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,8 @@ class MessageDelta(BaseModel):
tool_calls: Optional[List[ToolCallDelta]] = None
# role: Optional[str] = None
function_call: Optional[FunctionCallDelta] = None # Deprecated
refusal: None = None # Appears in OpenAI spec
role: Optional[str] = "assistant"


class ChunkChoice(BaseModel):
Expand All @@ -132,9 +134,13 @@ class ChatCompletionChunkResponse(BaseModel):

id: str
choices: List[ChunkChoice]
created: datetime.datetime
created: Union[datetime.datetime, str]
model: str
# system_fingerprint: str # docs say this is mandatory, but in reality API returns None
system_fingerprint: Optional[str] = None
# object: str = Field(default="chat.completion")
object: Literal["chat.completion.chunk"] = "chat.completion.chunk"

# There's some extra fields in an actual response that are not included here
# Example:
# {"id":"chatcmpl-AOWlu4jOLWzmZ9JdfSuahcHETAkoQ","choices":[{"delta":{"content":" I","function_call":null,"refusal":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null}],"created":1730409210,"model":"gpt-4-0613","object":"chat.completion.chunk","service_tier":null,"system_fingerprint":null,"usage":null}
123 changes: 0 additions & 123 deletions letta/schemas/openai/chat_completions.py

This file was deleted.

2 changes: 1 addition & 1 deletion letta/schemas/tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
)
from letta.functions.schema_generator import generate_schema_from_args_schema_v2
from letta.schemas.letta_base import LettaBase
from letta.schemas.openai.chat_completions import ToolCall
from letta.schemas.openai.chat_completion_request import ToolCall


class BaseTool(LettaBase):
Expand Down
Loading
Loading