From b8338047af0b0e901562f2b6094a28388d5c6a51 Mon Sep 17 00:00:00 2001
From: User
Date: Mon, 1 Jul 2024 14:56:22 +0300
Subject: [PATCH] fix llama_index run_step_decorator

---
 motleycrew/agents/llama_index/llama_index.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/motleycrew/agents/llama_index/llama_index.py b/motleycrew/agents/llama_index/llama_index.py
index effdadc6..95a955dc 100644
--- a/motleycrew/agents/llama_index/llama_index.py
+++ b/motleycrew/agents/llama_index/llama_index.py
@@ -55,10 +55,7 @@ def __init__(
     def run_step_decorator(self):
         """Decorator for inclusion in the call chain of the agent, the output handler tool"""
         ensure_module_is_installed("llama_index")
-
         def decorator(func):
-            output_task_step = None
-
             def wrapper(
                 task_id: str,
                 step: Optional[TaskStep] = None,
@@ -67,15 +64,14 @@ def wrapper(
                 **kwargs: Any,
             ):
-                nonlocal output_task_step
-
                 try:
                     cur_step_output = func(task_id, step, input, mode, **kwargs)
                 except DirectOutput as output_exc:
                     self.direct_output = output_exc
                     output = AgentChatResponse(str(output_exc.output))
+                    task_step = TaskStep(task_id=task_id, step_id=str(uuid.uuid4()))
                     cur_step_output = TaskStepOutput(
-                        output=output, is_last=True, next_steps=[], task_step=output_task_step
+                        output=output, is_last=True, next_steps=[], task_step=task_step
                     )
                     return cur_step_output
 
@@ -85,7 +81,6 @@ def wrapper(
                 if cur_step_output.is_last:
                     cur_step_output.is_last = False
                     task_id = cur_step_output.task_step.task_id
-                    agent_output = cur_step_output.output.response
                     output_task_step = TaskStep(
                         task_id=task_id,
                         step_id=str(uuid.uuid4()),
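
Note on the fix: before this patch, the except branch for DirectOutput reused the
nonlocal output_task_step, which is None until at least one non-final step has run,
so a DirectOutput raised on the first step produced a TaskStepOutput with
task_step=None. The patch drops the nonlocal and builds a fresh TaskStep inside the
except branch instead. Below is a minimal, self-contained sketch of the corrected
branch; TaskStep, TaskStepOutput, AgentChatResponse, and DirectOutput here are
simplified stand-ins for the real llama_index / motleycrew classes, and run_step
and failing_step are hypothetical helpers for illustration only.

    import uuid
    from dataclasses import dataclass, field
    from typing import Any, Callable, List


    @dataclass
    class TaskStep:  # stand-in for llama_index's TaskStep
        task_id: str
        step_id: str


    @dataclass
    class AgentChatResponse:  # stand-in for llama_index's AgentChatResponse
        response: str


    @dataclass
    class TaskStepOutput:  # stand-in for llama_index's TaskStepOutput
        output: AgentChatResponse
        task_step: TaskStep
        is_last: bool
        next_steps: List[TaskStep] = field(default_factory=list)


    class DirectOutput(Exception):
        """Raised when an output handler short-circuits the agent."""

        def __init__(self, output: Any):
            self.output = output


    def run_step(task_id: str, func: Callable[[str], TaskStepOutput]) -> TaskStepOutput:
        try:
            return func(task_id)
        except DirectOutput as output_exc:
            # The fix: construct a fresh TaskStep here rather than reusing a
            # nonlocal that may still be None on the very first step.
            task_step = TaskStep(task_id=task_id, step_id=str(uuid.uuid4()))
            return TaskStepOutput(
                output=AgentChatResponse(str(output_exc.output)),
                task_step=task_step,
                is_last=True,
            )


    def failing_step(task_id: str) -> TaskStepOutput:
        # Simulates an output handler ending the run on the first step.
        raise DirectOutput("final answer")


    if __name__ == "__main__":
        result = run_step("task-1", failing_step)
        assert result.is_last and result.task_step.task_id == "task-1"
        print(result.output.response)  # -> final answer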