From 1f9c901ff329f12d76122565de1bc268596357f7 Mon Sep 17 00:00:00 2001
From: fatihozturkh2o
Date: Mon, 9 Sep 2024 04:42:54 -0700
Subject: [PATCH] GeneralKnowledgeAgent -> ChatAgent

---
 openai_server/agent_backend.py |  8 ++++----
 openai_server/agents.py        | 29 +++++++++++++++++------------
 2 files changed, 21 insertions(+), 16 deletions(-)

diff --git a/openai_server/agent_backend.py b/openai_server/agent_backend.py
index d749ee5f2..82688057c 100644
--- a/openai_server/agent_backend.py
+++ b/openai_server/agent_backend.py
@@ -305,7 +305,7 @@ def run_autogen(query=None,
         get_code_executor,
         get_human_proxy_agent,
         get_main_group_chat_manager,
-        get_general_knowledge_agent,
+        get_chat_agent,
         get_code_group_chat_manager
     )
 
@@ -352,9 +352,9 @@ def run_autogen(query=None,
         autogen_max_consecutive_auto_reply=autogen_max_consecutive_auto_reply,
     )
 
-    general_knowledge_agent = get_general_knowledge_agent(
+    chat_agent = get_chat_agent(
         llm_config=llm_config,
-        autogen_max_consecutive_auto_reply=1,  # Always 1 turn for general knowledge agent
+        autogen_max_consecutive_auto_reply=1,  # Always 1 turn for chat agent
     )
     code_group_chat_manager = get_code_group_chat_manager(
         llm_config=llm_config,
@@ -366,7 +366,7 @@ def run_autogen(query=None,
     main_group_chat_manager = get_main_group_chat_manager(
         llm_config=llm_config,
         prompt=query,
-        agents=[general_knowledge_agent, code_group_chat_manager],
+        agents=[chat_agent, code_group_chat_manager],
         max_round=40,
     )
     # apply chat history to human_proxy_agent and main_group_chat_manager
diff --git a/openai_server/agents.py b/openai_server/agents.py
index 6cf938284..47dd9f853 100644
--- a/openai_server/agents.py
+++ b/openai_server/agents.py
@@ -78,31 +78,36 @@ def get_code_writer_agent(
     )
     return code_writer_agent
 
-def get_general_knowledge_agent(
+def get_chat_agent(
         llm_config:dict,
         autogen_max_consecutive_auto_reply:int = 1,
 ) -> H2OConversableAgent:
-    gk_system_message = "You answer the question or request provided with natural language only. You can not generate or execute codes. You can not talk to web. You are good at chatting. "
-    # TODO: Think about the Terminate procedure
-    # gk_system_message += (
-    #     f"Add 'TERMINATE' at the end of your response if you think you have enough finding or results to answer user request: {prompt}"
-    # )
-    general_knowledge_agent = H2OConversableAgent(
-        name="general_knowledge_agent",
-        system_message=gk_system_message,
+    system_message = (
+        "You answer the question or request provided with natural language only. "
+        "You can not generate or execute codes. "
+        "You can not talk to web. "
+        "You are good at chatting. "
+        "You are good at answering general knowledge questions "
+        "based on your own memory or past conversation context. "
+    )
+
+    chat_agent = H2OConversableAgent(
+        name="chat_agent",
+        system_message=system_message,
         llm_config=llm_config,
         code_execution_config=False,  # Turn off code execution for this agent.
         human_input_mode="NEVER",
         max_consecutive_auto_reply=autogen_max_consecutive_auto_reply,
     )
-    general_knowledge_agent.description = (
-        "This agent is able to answer general knowledge questions based on its own memory or past conversation context. "
+    chat_agent.description = (
+        "This agent is able to answer general knowledge questions "
+        "based on its own memory or past conversation context. "
         "Only answers with natural language. "
         "It can not execute codes. "
         "It can not generate code examples. "
         "It's only good at chatting and answering simple questions. "
     )
-    return general_knowledge_agent
+    return chat_agent
 
 def get_human_proxy_agent(
         llm_config:dict,