Refactor to a helper class for the agent's history (ConversationMemory) (#7008)

Co-authored-by: openhands <[email protected]>
Co-authored-by: Calvin Smith <[email protected]>
3 people authored Feb 28, 2025
1 parent 8a58e72 commit 0f07805
Showing 7 changed files with 877 additions and 671 deletions.
40 changes: 16 additions & 24 deletions openhands/agenthub/codeact_agent/codeact_agent.py
@@ -9,16 +9,13 @@
 from openhands.core.config import AgentConfig
 from openhands.core.logger import openhands_logger as logger
 from openhands.core.message import Message, TextContent
-from openhands.core.message_utils import (
-    apply_prompt_caching,
-    events_to_messages,
-)
 from openhands.events.action import (
     Action,
     AgentFinishAction,
 )
 from openhands.llm.llm import LLM
 from openhands.memory.condenser import Condenser
+from openhands.memory.conversation_memory import ConversationMemory
 from openhands.runtime.plugins import (
     AgentSkillsRequirement,
     JupyterRequirement,
@@ -90,6 +87,9 @@ def __init__(
             disabled_microagents=self.config.disabled_microagents,
         )
 
+        # Create a ConversationMemory instance
+        self.conversation_memory = ConversationMemory(self.prompt_manager)
+
         self.condenser = Condenser.from_config(self.config.condenser)
         logger.debug(f'Using condenser: {self.condenser}')
 
@@ -168,13 +168,21 @@ def _get_messages(self, state: State) -> list[Message]:
         if not self.prompt_manager:
             raise Exception('Prompt Manager not instantiated.')
 
-        messages: list[Message] = self._initial_messages()
+        # Use conversation_memory to process events instead of calling events_to_messages directly
+        messages = self.conversation_memory.process_initial_messages(
+            with_caching=self.llm.is_caching_prompt_active()
+        )
 
         # Condense the events from the state.
         events = self.condenser.condensed_history(state)
 
-        messages += events_to_messages(
-            events,
+        logger.debug(
+            f'Processing {len(events)} events from a total of {len(state.history)} events'
+        )
+
+        messages = self.conversation_memory.process_events(
+            condensed_history=events,
+            initial_messages=messages,
             max_message_chars=self.llm.config.max_message_chars,
             vision_is_active=self.llm.vision_is_active(),
             enable_som_visual_browsing=self.config.enable_som_visual_browsing,
@@ -183,26 +191,10 @@
         messages = self._enhance_messages(messages)
 
         if self.llm.is_caching_prompt_active():
-            apply_prompt_caching(messages)
+            self.conversation_memory.apply_prompt_caching(messages)
 
         return messages
 
-    def _initial_messages(self) -> list[Message]:
-        """Creates the initial messages (including the system prompt) for the LLM conversation."""
-        assert self.prompt_manager, 'Prompt Manager not instantiated.'
-
-        return [
-            Message(
-                role='system',
-                content=[
-                    TextContent(
-                        text=self.prompt_manager.get_system_message(),
-                        cache_prompt=self.llm.is_caching_prompt_active(),
-                    )
-                ],
-            )
-        ]
-
     def _enhance_messages(self, messages: list[Message]) -> list[Message]:
         """Enhances the user message with additional context based on keywords matched.