Feature/letta agent #116

Open · wants to merge 3 commits into main
4 changes: 3 additions & 1 deletion python/setup.cfg
@@ -23,14 +23,16 @@ install_requires =
    boto3==1.35.0

[options.extras_require]
letta =
    letta==0.5.5
anthropic =
    anthropic==0.32.0
openai =
    openai==1.55.0
all =
    anthropic==0.32.0
    openai==1.55.0
    letta==0.5.5

[options.packages.find]
where = src
exclude =
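For context, these extras let users opt into the Letta dependency at install time. Below is a minimal sketch of guarding LettaAgent usage behind a check that the optional dependency actually resolved; the PyPI package name multi-agent-orchestrator and the install command are assumptions, not part of this diff.

import importlib.util

# Assumes installation via: pip install "multi-agent-orchestrator[letta]"
# Fail fast if the optional letta extra was not installed.
if importlib.util.find_spec("letta") is None:
    raise RuntimeError(
        "The letta extra is not installed; "
        "install it with: pip install 'multi-agent-orchestrator[letta]'"
    )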
3 changes: 3 additions & 0 deletions python/src/multi_agent_orchestrator/agents/__init__.py
@@ -10,6 +10,7 @@
from .chain_agent import ChainAgent, ChainAgentOptions
from .bedrock_translator_agent import BedrockTranslatorAgent, BedrockTranslatorAgentOptions
from .bedrock_inline_agent import BedrockInlineAgent, BedrockInlineAgentOptions
from .letta_agent import LettaAgent, LettaAgentOptions

try:
    from .anthropic_agent import AnthropicAgent, AnthropicAgentOptions
@@ -40,6 +41,8 @@
    'ChainAgentOptions',
    'BedrockInlineAgent',
    'BedrockInlineAgentOptions',
    'LettaAgent',
    'LettaAgentOptions',
]

if _ANTHROPIC_AVAILABLE:
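This module already guards the optional anthropic provider with a try/except import and an _ANTHROPIC_AVAILABLE flag, while the letta import above is unconditional. A sketch of how the letta import could follow the same pattern; the _LETTA_AVAILABLE name is a hypothetical placeholder, not part of this diff.

# Hypothetical guard mirroring the existing anthropic pattern.
try:
    from .letta_agent import LettaAgent, LettaAgentOptions
    _LETTA_AVAILABLE = True
except ImportError:
    _LETTA_AVAILABLE = False

if _LETTA_AVAILABLE:
    __all__.extend(['LettaAgent', 'LettaAgentOptions'])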
82 changes: 82 additions & 0 deletions python/src/multi_agent_orchestrator/agents/letta_agent.py
@@ -0,0 +1,82 @@
import json
from dataclasses import dataclass
from typing import List, Optional, Dict
from letta import LocalClient
from letta.schemas.memory import ChatMemory
from letta.schemas.letta_response import LettaResponse
from letta import LLMConfig, EmbeddingConfig
from multi_agent_orchestrator.types import ConversationMessage
from multi_agent_orchestrator.agents import Agent, AgentOptions
from multi_agent_orchestrator.utils import Logger

@dataclass
class LettaAgentOptions(AgentOptions):
    model_name: str = "letta"
    model_name_embedding: str = "letta"


class LettaAgent(Agent):
    """
    Represents a Letta agent that interacts with a runtime client.
    Extends the base Agent class and implements Letta-specific methods.
    """
    def __init__(self, options: LettaAgentOptions):
        super().__init__(options)
        self.options = options
        self.client = LocalClient()
        self.client.set_default_llm_config(LLMConfig.default_config(model_name=options.model_name))
        self.client.set_default_embedding_config(EmbeddingConfig.default_config(model_name=options.model_name_embedding))

        # Reuse an existing Letta agent with this name, or create one on first use.
        try:
            agent_state = self.client.get_agent_by_name(agent_name=options.name)
        except ValueError:
            agent_state = self.client.create_agent(
                name=options.name,
                memory=ChatMemory(
                    human=f"My name is {options.name}",
                    persona=options.description
                )
            )
        self._letta_id = agent_state.id

    async def process_request(
        self,
        input_text: str,
        user_id: str,
        session_id: str,
        chat_history: List[ConversationMessage],
        additional_params: Optional[Dict[str, str]] = None
    ) -> ConversationMessage:

        response = self.client.send_message(
            agent_id=self._letta_id,
            message=input_text,
            role="user"
        )
        return ConversationMessage(
            role="assistant",
            content=LettaAgent._process_response(response),
        )

    @staticmethod
    def _process_response(response: LettaResponse) -> str:
        """
        Extracts the message from the 'send_message' function call in the LettaResponse.

        Args:
            response (LettaResponse): The response object containing messages

        Returns:
            str: The extracted message from the send_message function call, or an empty string if not found
        """
        for message in response.messages:
            if (message.message_type == "function_call" and
                    message.function_call.name == "send_message"):
                # The function call arguments arrive as a JSON string; parse them into a dict.
                args = message.function_call.arguments
                args_dict = json.loads(args)
                return args_dict.get("message", "")

        return ""

27 changes: 27 additions & 0 deletions python/src/tests/agents/test_letta_agent.py
@@ -0,0 +1,27 @@
import pytest
from unittest.mock import Mock, patch
from multi_agent_orchestrator.types import ConversationMessage
from multi_agent_orchestrator.agents import LettaAgent, LettaAgentOptions
from letta.schemas.letta_response import LettaResponse

@pytest.fixture
def mock_local_client():
    # Patch the name imported into letta_agent (not letta.LocalClient) so the agent
    # under test actually receives the mock, and configure it before construction.
    with patch('multi_agent_orchestrator.agents.letta_agent.LocalClient') as mock_client:
        # Simulate a first run: the agent does not exist yet, so it gets created.
        mock_client.return_value.get_agent_by_name.side_effect = ValueError()
        mock_client.return_value.create_agent.return_value.id = 'test_agent_id'
        yield mock_client

@pytest.fixture
def letta_agent(mock_local_client):
    options = LettaAgentOptions(
        name='test_agent_name',
        description='test_agent description',
        model_name='letta',
        model_name_embedding='letta'
    )
    return LettaAgent(options)

def test_init(letta_agent, mock_local_client):
    mock_local_client.return_value.create_agent.assert_called_once()
    assert letta_agent._letta_id == 'test_agent_id'
    assert letta_agent.options.name == 'test_agent_name'
    assert letta_agent.options.description == 'test_agent description'
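A sketch of how process_request could be exercised with the same fixtures, faking a response whose only message is a send_message function call; this test is hypothetical, not part of the diff, and relies on pytest-asyncio from test_requirements.txt.

import json
from types import SimpleNamespace

@pytest.mark.asyncio
async def test_process_request(letta_agent, mock_local_client):
    # Fake a LettaResponse-like object carrying a single send_message function call.
    fake_call = SimpleNamespace(
        message_type="function_call",
        function_call=SimpleNamespace(
            name="send_message",
            arguments=json.dumps({"message": "hello"})
        ),
    )
    mock_local_client.return_value.send_message.return_value = SimpleNamespace(messages=[fake_call])

    result = await letta_agent.process_request("hi", "user-1", "session-1", [])
    assert result.content == "hello"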
3 changes: 2 additions & 1 deletion python/test_requirements.txt
@@ -4,4 +4,5 @@ boto3
anthropic
moto
pytest-mock
pytest-asyncio
letta