Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use keyword matching for CodeAct microagents #4568

Merged
merged 34 commits into from
Nov 9, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
e798bc6
first pass at gh microagent triggers
rbren Oct 16, 2024
c9a3cc7
first pass at using gh micro
rbren Oct 17, 2024
2461491
Merge branch 'main' into rb/gh-micro-agent
rbren Oct 21, 2024
186f2ac
more instructions
rbren Oct 21, 2024
f45fa1c
Merge branch 'main' into rb/gh-micro-agent
rbren Oct 25, 2024
56a9469
Update frontend/src/components/project-menu/ProjectMenuCard.tsx
rbren Oct 25, 2024
2669fdb
fix test
rbren Oct 25, 2024
004ffc0
Merge branch 'main' into rb/gh-micro-agent
rbren Oct 25, 2024
d891626
fix tests
rbren Oct 25, 2024
32b7ef2
better messages
rbren Oct 25, 2024
4f5c8f9
better prompt hints
rbren Oct 25, 2024
f7ee9fe
more fixes
rbren Oct 25, 2024
eb0f056
fix up last_user_message logic
rbren Oct 25, 2024
3366e87
move env reminder back to bottom
rbren Oct 25, 2024
300c0fc
remove microagents template
rbren Oct 25, 2024
38006d8
Merge branch 'main' into rb/gh-micro-agent
rbren Nov 7, 2024
f450d74
fix some merge issues
rbren Nov 7, 2024
19782be
fix some merge issues
rbren Nov 7, 2024
c087dc8
fix function calling prompt
rbren Nov 7, 2024
f8a1c35
remove extra promptman
rbren Nov 7, 2024
d847a54
fix dirs
rbren Nov 7, 2024
0724514
make easter egg less likely to cause problems
rbren Nov 7, 2024
64a1c61
Merge branch 'main' into rb/gh-micro-agent
rbren Nov 7, 2024
b7889bd
fix tests
rbren Nov 7, 2024
44d27bf
lint
rbren Nov 7, 2024
8d772eb
fix tests
rbren Nov 7, 2024
efdd230
use xml
rbren Nov 7, 2024
0cae5ec
always decorate messages to fix caching
rbren Nov 7, 2024
e548756
lint
rbren Nov 7, 2024
64627e3
update eval for api change
rbren Nov 8, 2024
199ee9e
Update openhands/agenthub/codeact_agent/micro/github.md
rbren Nov 8, 2024
cb5614e
Update openhands/agenthub/codeact_agent/micro/github.md
rbren Nov 8, 2024
c016264
Merge branch 'main' into rb/gh-micro-agent
rbren Nov 8, 2024
b8879e7
Update openhands/agenthub/codeact_agent/micro/github.md
rbren Nov 8, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
72 changes: 34 additions & 38 deletions openhands/agenthub/codeact_agent/codeact_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,26 @@ def step(self, state: State) -> Action:
return self.action_parser.parse(response)

def _get_messages(self, state: State) -> list[Message]:
messages: list[Message] = []
messages: list[Message] = [
Message(
role='system',
content=[
TextContent(
text=self.prompt_manager.get_system_message(),
cache_prompt=self.llm.is_caching_prompt_active(), # Cache system prompt
)
],
),
Message(
role='user',
content=[
TextContent(
text=self.prompt_manager.get_example_user_message(),
cache_prompt=self.llm.is_caching_prompt_active(), # if the user asks the same query,
)
],
),
]

for event in state.history.get_events():
# create a regular message from an event
Expand All @@ -221,6 +240,7 @@ def _get_messages(self, state: State) -> list[Message]:
else:
raise ValueError(f'Unknown event type: {type(event)}')

# add regular message
if message:
# handle error if the message is the SAME role as the previous message
# litellm.exceptions.BadRequestError: litellm.BadRequestError: OpenAIException - Error code: 400 - {'detail': 'Only supports u/a/u/a/u...'}
Expand All @@ -230,6 +250,18 @@ def _get_messages(self, state: State) -> list[Message]:
else:
messages.append(message)

# Add caching to the last 2 user messages
if self.llm.is_caching_prompt_active():
user_turns_processed = 0
for message in reversed(messages):
if message.role == 'user' and user_turns_processed < 2:
message.content[
-1
].cache_prompt = True # Last item inside the message content
user_turns_processed += 1

# The latest user message is important:
# we want to remind the agent of the environment constraints
latest_user_message = next(
islice(
(
Expand All @@ -242,43 +274,7 @@ def _get_messages(self, state: State) -> list[Message]:
),
None,
)
messages = (
[
Message(
role='system',
content=[
TextContent(
text=self.prompt_manager.get_system_message(),
cache_prompt=self.llm.is_caching_prompt_active(), # Cache system prompt
)
],
),
Message(
role='user',
content=[
TextContent(
text=self.prompt_manager.get_example_user_message(
''
if latest_user_message is None
else latest_user_message.content[-1].text
),
cache_prompt=self.llm.is_caching_prompt_active(), # if the user asks the same query,
)
],
),
]
+ messages
)

if latest_user_message:
reminder_text = f'\n\nENVIRONMENT REMINDER: You have {state.max_iterations - state.iteration} turns left to complete the task. When finished reply with <finish></finish>.'
latest_user_message.content.append(TextContent(text=reminder_text))

if self.llm.is_caching_prompt_active():
user_turns_processed = 0
for message in reversed(messages):
if message.role == 'user' and user_turns_processed < 2:
message.content[-1].cache_prompt = True
user_turns_processed += 1
self.prompt_manager.enhance_message(latest_user_message, state)

return messages
26 changes: 20 additions & 6 deletions openhands/utils/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

from jinja2 import Template

from openhands.controller.state.state import State
from openhands.core.message import Message, TextContent
from openhands.utils.microagent import MicroAgent


Expand Down Expand Up @@ -53,7 +55,7 @@ def get_system_message(self) -> str:
).strip()
return rendered

def get_example_user_message(self, latest_user_message: str) -> str:
def get_example_user_message(self) -> str:
"""This is the initial user message provided to the agent
before *actual* user instructions are provided.

Expand All @@ -63,11 +65,23 @@ def get_example_user_message(self, latest_user_message: str) -> str:
These additional context will convert the current generic agent
into a more specialized agent that is tailored to the user's task.
"""
return self.user_template.render().strip()

def enhance_message(self, message: Message, state: State) -> None:
"""Enhance the user message with additional context.

This method is used to enhance the user message with additional context
about the user's task. The additional context will convert the current
generic agent into a more specialized agent that is tailored to the user's task.
"""
micro_agent_prompts = []
for micro_agent in self.microagents.values():
if micro_agent.should_trigger(latest_user_message):
if micro_agent.should_trigger(message):
micro_agent_prompts.append(micro_agent.content)
rendered = self.user_template.render(
micro_agents=micro_agent_prompts,
)
return rendered.strip()
if len(micro_agent_prompts) > 0:
micro_text = "EXTRA INFO: the following information has been included based on a keyword match. It may or may not be relevant to the user's request.\n\n"
for micro_agent_prompt in micro_agent_prompts:
micro_text += micro_agent_prompt + '\n\n'
message.content.append(TextContent(text=micro_text))
reminder_text = f'ENVIRONMENT REMINDER: You have {state.max_iterations - state.iteration} turns left to complete the task. When finished reply with <finish></finish>.'
message.content.append(TextContent(text=reminder_text))