Skip to content

Commit

Permalink
move memory
Browse files Browse the repository at this point in the history
  • Loading branch information
yyiilluu committed Jun 25, 2023
1 parent 97ec6e6 commit 54d12be
Show file tree
Hide file tree
Showing 5 changed files with 19 additions and 12 deletions.
12 changes: 7 additions & 5 deletions autochain/agent/conversational_agent/conversational_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ def plan(
self,
history: ChatMessageHistory,
intermediate_steps: List[AgentAction],
**kwargs: Any
**kwargs: Any,
) -> Union[AgentAction, AgentFinish]:
"""
Plan the next step. either taking an action with AgentAction or respond to user with AgentFinish
Expand All @@ -124,10 +124,12 @@ def plan(
tool_strings = "\n\n".join(
[f"> {tool.name}: \n{tool.description}" for tool in self.tools]
)
inputs = {"tool_names": tool_names,
"tools": tool_strings,
"history": history.format_message(),
**kwargs}
inputs = {
"tool_names": tool_names,
"tools": tool_strings,
"history": history.format_message(),
**kwargs,
}
final_prompt = self.format_prompt(
self.prompt_template, intermediate_steps, **inputs
)
Expand Down
2 changes: 1 addition & 1 deletion autochain/agent/support_agent/support_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ def plan(
final_messages = self.format_prompt(
self.prompt_template, intermediate_steps, **inputs
)
logger.info(f"\nFull Input: {[m.content for m in final_messages]} \n")
logger.info(f"\nFull Input: {final_messages[0].content} \n")

full_output: Generation = self.llm.generate(final_messages).generations[0]
agent_output: Union[AgentAction, AgentFinish] = self.output_parser.parse(
Expand Down
9 changes: 9 additions & 0 deletions autochain/chain/base_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,15 @@ def _run(
next_step_output.intermediate_steps = intermediate_steps
return next_step_output

# stores action output into the conversation as FunctionMessage, which can be used by
# OpenAIFunctionAgent
if isinstance(next_step_output, AgentAction):
self.memory.save_conversation(
message=str(next_step_output.observation),
name=next_step_output.tool,
message_type=MessageType.FunctionMessage,
)

intermediate_steps.append(next_step_output)
# update inputs
inputs[constants.INTERMEDIATE_STEPS] = intermediate_steps
Expand Down
5 changes: 0 additions & 5 deletions autochain/chain/chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,11 +109,6 @@ def _take_next_step(
observation = f"Tool {output.tool} if not supported"

output.observation = observation
self.memory.save_conversation(
message=str(observation),
name=output.tool,
message_type=MessageType.FunctionMessage,
)
return output
else:
raise ValueError(f"Unsupported action: {type(output)}")
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,8 @@ class TestChangeShippingAddressWithFunctionCalling(BaseTest):

if __name__ == "__main__":
tester = WorkflowTester(
tests=[TestChangeShippingAddressWithFunctionCalling()], output_dir="./test_results"
tests=[TestChangeShippingAddressWithFunctionCalling()],
output_dir="./test_results",
)

args = get_test_args()
Expand Down

0 comments on commit 54d12be

Please sign in to comment.