Skip to content

Commit

Permalink
feat(backend/executor): Add TERMINATED execution status (#9185)
Browse files Browse the repository at this point in the history
- Resolves #9182

Previously, a terminated execution was represented as status `FAILED` with the error message `TERMINATED`; it now has its own dedicated status.

### Changes 🏗️

- Add `TERMINATED` to `AgentExecutionStatus` enum in DB schema (and its
mirror in the front end)
- Update the executor so that terminated node and graph executions are given
status `TERMINATED` instead of `FAILED`/`COMPLETED`
- Add `TERMINATED` case to status checks referencing
`AgentExecutionStatus`

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - Start and forcefully stop a graph execution

---------

Co-authored-by: Zamil Majdy <[email protected]>
  • Loading branch information
Pwuts and majdyz authored Jan 6, 2025
1 parent 081c4a6 commit c3caa11
Show file tree
Hide file tree
Showing 9 changed files with 43 additions and 22 deletions.
6 changes: 5 additions & 1 deletion autogpt_platform/backend/backend/blocks/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,11 @@ def run(self, input_data: Input, **kwargs) -> BlockOutput:
)

if not event.node_id:
if event.status in [ExecutionStatus.COMPLETED, ExecutionStatus.FAILED]:
if event.status in [
ExecutionStatus.COMPLETED,
ExecutionStatus.TERMINATED,
ExecutionStatus.FAILED,
]:
logger.info(f"Execution {log_id} ended with status {event.status}")
break
else:
Expand Down
2 changes: 1 addition & 1 deletion autogpt_platform/backend/backend/data/execution.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,9 +270,9 @@ async def update_graph_execution_start_time(graph_exec_id: str):

async def update_graph_execution_stats(
graph_exec_id: str,
status: ExecutionStatus,
stats: dict[str, Any],
) -> ExecutionResult:
status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED
res = await AgentGraphExecution.prisma().update(
where={"id": graph_exec_id},
data={
Expand Down
28 changes: 15 additions & 13 deletions autogpt_platform/backend/backend/executor/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -597,14 +597,15 @@ def on_graph_execution(
node_eid="*",
block_name="-",
)
timing_info, (exec_stats, error) = cls._on_graph_execution(
timing_info, (exec_stats, status, error) = cls._on_graph_execution(
graph_exec, cancel, log_metadata
)
exec_stats["walltime"] = timing_info.wall_time
exec_stats["cputime"] = timing_info.cpu_time
exec_stats["error"] = str(error) if error else None
result = cls.db_client.update_graph_execution_stats(
graph_exec_id=graph_exec.graph_exec_id,
status=status,
stats=exec_stats,
)
cls.db_client.send_execution_update(result)
Expand All @@ -616,11 +617,12 @@ def _on_graph_execution(
graph_exec: GraphExecutionEntry,
cancel: threading.Event,
log_metadata: LogMetadata,
) -> tuple[dict[str, Any], Exception | None]:
) -> tuple[dict[str, Any], ExecutionStatus, Exception | None]:
"""
Returns:
The execution statistics of the graph execution.
The error that occurred during the execution.
dict: The execution statistics of the graph execution.
ExecutionStatus: The final status of the graph execution.
Exception | None: The error that occurred during the execution, if any.
"""
log_metadata.info(f"Start graph execution {graph_exec.graph_exec_id}")
exec_stats = {
Expand Down Expand Up @@ -665,8 +667,7 @@ def callback(result: object):

while not queue.empty():
if cancel.is_set():
error = RuntimeError("Execution is cancelled")
return exec_stats, error
return exec_stats, ExecutionStatus.TERMINATED, error

exec_data = queue.get()

Expand Down Expand Up @@ -696,8 +697,7 @@ def callback(result: object):
)
for node_id, execution in list(running_executions.items()):
if cancel.is_set():
error = RuntimeError("Execution is cancelled")
return exec_stats, error
return exec_stats, ExecutionStatus.TERMINATED, error

if not queue.empty():
break # yield to parent loop to execute new queue items
Expand All @@ -716,7 +716,12 @@ def callback(result: object):
finished = True
cancel.set()
cancel_thread.join()
return exec_stats, error

return (
exec_stats,
ExecutionStatus.FAILED if error else ExecutionStatus.COMPLETED,
error,
)


class ExecutionManager(AppService):
Expand Down Expand Up @@ -882,11 +887,8 @@ def cancel_execution(self, graph_exec_id: str) -> None:
ExecutionStatus.COMPLETED,
ExecutionStatus.FAILED,
):
self.db_client.upsert_execution_output(
node_exec.node_exec_id, "error", "TERMINATED"
)
exec_update = self.db_client.update_execution_status(
node_exec.node_exec_id, ExecutionStatus.FAILED
node_exec.node_exec_id, ExecutionStatus.TERMINATED
)
self.db_client.send_execution_update(exec_update)

Expand Down
3 changes: 3 additions & 0 deletions autogpt_platform/backend/backend/util/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,9 @@ async def is_execution_completed():
if status == ExecutionStatus.FAILED:
log.info("Execution failed")
raise Exception("Execution failed")
if status == ExecutionStatus.TERMINATED:
log.info("Execution terminated")
raise Exception("Execution terminated")
return status == ExecutionStatus.COMPLETED

# Wait for the executions to complete
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- Add "TERMINATED" to execution status enum type
-- NOTE(review): on PostgreSQL, `ALTER TYPE ... ADD VALUE` cannot run inside a
-- transaction block on versions < 12 — confirm the migration runner executes
-- this statement outside a transaction if older versions must be supported.
ALTER TYPE "AgentExecutionStatus" ADD VALUE 'TERMINATED';
3 changes: 2 additions & 1 deletion autogpt_platform/backend/schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -216,6 +216,7 @@ enum AgentExecutionStatus {
QUEUED
RUNNING
COMPLETED
TERMINATED
FAILED
}

Expand Down Expand Up @@ -638,4 +639,4 @@ enum APIKeyStatus {
ACTIVE
REVOKED
SUSPENDED
}
}
6 changes: 4 additions & 2 deletions autogpt_platform/frontend/src/components/CustomNode.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -848,8 +848,10 @@ export function CustomNode({
data.status === "COMPLETED",
"border-yellow-600 bg-yellow-600 text-white":
data.status === "RUNNING",
"border-red-600 bg-red-600 text-white":
data.status === "FAILED",
"border-red-600 bg-red-600 text-white": [
"FAILED",
"TERMINATED",
].includes(data.status || ""),
"border-blue-600 bg-blue-600 text-white":
data.status === "QUEUED",
"border-gray-600 bg-gray-600 font-black":
Expand Down
5 changes: 3 additions & 2 deletions autogpt_platform/frontend/src/hooks/useAgentGraph.ts
Original file line number Diff line number Diff line change
Expand Up @@ -558,8 +558,9 @@ export default function useAgentGraph(
return;
}
if (
nodeResult.status != "COMPLETED" &&
nodeResult.status != "FAILED"
!["COMPLETED", "TERMINATED", "FAILED"].includes(
nodeResult.status,
)
) {
pendingNodeExecutions.add(nodeResult.node_exec_id);
} else {
Expand Down
10 changes: 8 additions & 2 deletions autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ export type GraphExecution = {
ended_at: number;
duration: number;
total_run_time: number;
status: "INCOMPLETE" | "QUEUED" | "RUNNING" | "COMPLETED" | "FAILED";
status: "QUEUED" | "RUNNING" | "COMPLETED" | "TERMINATED" | "FAILED";
graph_id: string;
graph_version: number;
};
Expand Down Expand Up @@ -246,7 +246,13 @@ export type NodeExecutionResult = {
node_exec_id: string;
node_id: string;
block_id: string;
status: "INCOMPLETE" | "QUEUED" | "RUNNING" | "COMPLETED" | "FAILED";
status:
| "INCOMPLETE"
| "QUEUED"
| "RUNNING"
| "COMPLETED"
| "TERMINATED"
| "FAILED";
input_data: { [key: string]: any };
output_data: { [key: string]: Array<any> };
add_time: Date;
Expand Down

0 comments on commit c3caa11

Please sign in to comment.