From c2a5fbceb703033ee35619e5c9c0e393b212becb Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 11:45:43 -0700 Subject: [PATCH 01/91] Server side work for socketio. Still needs testing and integration of proper client (Rather than the mock I've been using) --- frontend/vite.config.ts | 6 + openhands/server/listen.py | 69 +++ openhands/server/session/manager.py | 21 +- openhands/server/session/session.py | 14 +- poetry.lock | 920 +++++++++++++--------------- pyproject.toml | 4 +- 6 files changed, 545 insertions(+), 489 deletions(-) diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 4c403aca8496..1e963acd7127 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -82,6 +82,12 @@ export default defineConfig(({ mode }) => { changeOrigin: true, secure: !INSECURE_SKIP_VERIFY, }, + "/socket.io": { + target: WS_URL, + ws: true, + changeOrigin: true, + secure: !INSECURE_SKIP_VERIFY, + } }, }, ssr: { diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 3b4db2daddad..a487a6d5cac9 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -1,3 +1,4 @@ +from ast import parse import asyncio import os import re @@ -10,7 +11,9 @@ import requests from pathspec import PathSpec from pathspec.patterns import GitWildMatchPattern +import socketio +from openhands.events.serialization.event import event_from_dict from openhands.security.options import SecurityAnalyzers from openhands.server.data_models.feedback import FeedbackDataModel, store_feedback from openhands.server.github import ( @@ -902,3 +905,69 @@ async def get_response(self, path: str, scope): app.mount('/', SPAStaticFiles(directory='./frontend/build', html=True), name='dist') + +sio = socketio.AsyncServer(async_mode='asgi', cors_allowed_origins='*') +app = socketio.ASGIApp(sio, other_asgi_app=app) + + +@sio.event +async def connect(session_id: str, environ): + logger.info(f"SIO:CONNECT: {session_id}") + + jwt_token = 
environ.get("HTTP_OH_TOKEN", '') + if jwt_token: + old_session_id = get_sid_from_token(jwt_token, config.jwt_secret) + if old_session_id == '': + sio.send({'error': 'Invalid token', 'error_code': 401}) + return + logger.info(f'Renaming existing session: {old_session_id} to {session_id}') + session = session_manager.rename_existing_session(old_session_id, session_id) + else: + session_id = str(uuid.uuid4()) + jwt_token = sign_token({'sid': session_id}, config.jwt_secret) + logger.info(f'New session: {session_id}') + session = session_manager.add_new_session(sio, session_id) + + github_token = environ.get('HTTP_GITHUB_TOKEN', '') + if not await authenticate_github_user(github_token): + raise RuntimeError(status.WS_1008_POLICY_VIOLATION) + + logger.info("TODO: Session work here...") + await session.send({'token': jwt_token, 'status': 'ok'}) + + latest_event_id = int(environ.get('HTTP_LATEST_EVENT_ID', -1)) + async_stream = AsyncEventStreamWrapper( + session.agent_session.event_stream, latest_event_id + 1 + ) + + async for event in async_stream: + if isinstance( + event, + ( + NullAction, + NullObservation, + ChangeAgentStateAction, + AgentStateChangedObservation, + ), + ): + continue + await session.send(event_to_dict(event)) + + +@sio.event +async def oh_action(session_id, data): + logger.info(f"SIO:OH_ACTION:{session_id}") + session = session_manager.get_existing_session(session_id) + if session is None: + raise ValueError(f'no_such_session_id:{session_id}') + session.on_event(event_from_dict(data)) + + +@sio.event +def disconnect(sid): + logger.info(f'SIO:DISCONNECT:{sid}') + # I dunno about this - should we create a new one? 
+ #session = session_manager.close(session_id) + #if session is None: + # raise ValueError(f'no_such_session_id:{session_id}') + diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index f746b3676e29..92b6a3d4eff1 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -1,7 +1,8 @@ import time -from dataclasses import dataclass +from dataclasses import dataclass, field from fastapi import WebSocket +import socketio from openhands.core.config import AppConfig from openhands.core.logger import openhands_logger as logger @@ -15,11 +16,17 @@ class SessionManager: config: AppConfig file_store: FileStore + sessions: dict[str, Session] = field(default_factory=dict) - def add_or_restart_session(self, sid: str, ws_conn: WebSocket) -> Session: - return Session( - sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config + def add_or_restart_session(self, sid: str, ws_conn: WebSocket, sio: socketio.AsyncServer | None, socket_id: str = None) -> Session: + session = Session( + sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config, sio=sio, socket_id=socket_id ) + self.sessions[sid] = session + return session + + def get_existing_session(self, sid: str): + return self.sessions.get(sid) async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() @@ -35,3 +42,9 @@ async def attach_to_conversation(self, sid: str) -> Conversation | None: async def detach_from_conversation(self, conversation: Conversation): await conversation.disconnect() + + async def stop_session(self, sid: str) -> bool: + session = self.sessions.pop(sid, None) + if session: + session.close() + return bool(session) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 25f707f15f53..ca8156df5d99 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -2,6 +2,7 @@ import time from fastapi import WebSocket, 
WebSocketDisconnect +import socketio from openhands.controller.agent import Agent from openhands.core.config import AppConfig @@ -29,16 +30,20 @@ class Session: sid: str websocket: WebSocket | None + sio: socketio.AsyncServer | None + socket_id: str | None last_active_ts: int = 0 is_alive: bool = True agent_session: AgentSession loop: asyncio.AbstractEventLoop def __init__( - self, sid: str, ws: WebSocket | None, config: AppConfig, file_store: FileStore + self, sid: str, ws: WebSocket | None, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None, socket_id: str | None ): self.sid = sid self.websocket = ws + self.sio = sio + self.socket_id = socket_id self.last_active_ts = int(time.time()) self.agent_session = AgentSession( sid, file_store, status_callback=self.queue_status_message @@ -180,9 +185,12 @@ async def dispatch(self, data: dict): async def send(self, data: dict[str, object]) -> bool: try: - if self.websocket is None or not self.is_alive: + if not self.is_alive: return False - await self.websocket.send_json(data) + if self.websocket: + await self.websocket.send_json(data) + if self.socket_id: + await self.sio.emit(data, to=self.socket_id) await asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) return True diff --git a/poetry.lock b/poetry.lock index 7b45e1b62efc..3bd30775b2c1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aenum" @@ -553,6 +553,17 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "bidict" +version = "0.23.1" +description = "The bidirectional mapping library for Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5"}, + {file = "bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71"}, +] + [[package]] name = "bleach" version = "6.2.0" @@ -583,17 +594,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.57" +version = "1.35.59" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.57-py3-none-any.whl", hash = "sha256:9edf49640c79a05b0a72f4c2d1e24dfc164344b680535a645f455ac624dc3680"}, - {file = "boto3-1.35.57.tar.gz", hash = "sha256:db58348849a5af061f0f5ec9c3b699da5221ca83354059fdccb798e3ddb6b62a"}, + {file = "boto3-1.35.59-py3-none-any.whl", hash = "sha256:8f8ff97cb9cb2e1ec7374209d0c09c1926b75604d6464c34bafaffd6d6cf0529"}, + {file = "boto3-1.35.59.tar.gz", hash = "sha256:81f4d8d6eff3e26b82cabd42eda816cfac9482821fdef353f18d2ba2f6e75f2d"}, ] [package.dependencies] -botocore = ">=1.35.57,<1.36.0" +botocore = ">=1.35.59,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -602,13 +613,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.57" +version = "1.35.59" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.57-py3-none-any.whl", hash = "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34"}, - {file = "botocore-1.35.57.tar.gz", hash = "sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327"}, + {file = "botocore-1.35.59-py3-none-any.whl", hash = "sha256:bcd66d7f55c8d1b6020eb86f2d87893fe591fb4be6a7d2a689c18be586452334"}, + {file = "botocore-1.35.59.tar.gz", hash = "sha256:de0ce655fedfc02c87869dfaa3b622488a17ff37da316ef8106cbe1573b83c98"}, ] [package.dependencies] @@ -1171,76 +1182,65 @@ test = ["pytest"] [[package]] name = "contourpy" -version = "1.3.0" +version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, - {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, - {file = 
"contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, - {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, - {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, - {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, - {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, - {file = 
"contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, - {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, - {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, - {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, - {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, - {file = 
"contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, - {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, - {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, - 
{file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, - {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, + {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, + {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2f926efda994cdf3c8d3fdb40b9962f86edbc4457e739277b961eced3d0b4c1"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adce39d67c0edf383647a3a007de0a45fd1b08dedaa5318404f1a73059c2512b"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abbb49fb7dac584e5abc6636b7b2a7227111c4f771005853e7d25176daaf8453"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a0cffcbede75c059f535725c1680dfb17b6ba8753f0c74b14e6a9c68c29d7ea3"}, + {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab29962927945d89d9b293eabd0d59aea28d887d4f3be6c22deaefbb938a7277"}, + {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974d8145f8ca354498005b5b981165b74a195abfae9a8129df3e56771961d595"}, + {file = "contourpy-1.3.1-cp310-cp310-win32.whl", hash = "sha256:ac4578ac281983f63b400f7fe6c101bedc10651650eef012be1ccffcbacf3697"}, + {file = "contourpy-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:174e758c66bbc1c8576992cec9599ce8b6672b741b5d336b5c74e35ac382b18e"}, + {file = "contourpy-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8b974d8db2c5610fb4e76307e265de0edb655ae8169e8b21f41807ccbeec4b"}, + {file = "contourpy-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20914c8c973f41456337652a6eeca26d2148aa96dd7ac323b74516988bea89fc"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d40d37c1c3a4961b4619dd9d77b12124a453cc3d02bb31a07d58ef684d3d86"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:113231fe3825ebf6f15eaa8bc1f5b0ddc19d42b733345eae0934cb291beb88b6"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dbbc03a40f916a8420e420d63e96a1258d3d1b58cbdfd8d1f07b49fcbd38e85"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a04ecd68acbd77fa2d39723ceca4c3197cb2969633836ced1bea14e219d077c"}, + {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c414fc1ed8ee1dbd5da626cf3710c6013d3d27456651d156711fa24f24bd1291"}, + {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:31c1b55c1f34f80557d3830d3dd93ba722ce7e33a0b472cba0ec3b6535684d8f"}, + {file = "contourpy-1.3.1-cp311-cp311-win32.whl", hash = 
"sha256:f611e628ef06670df83fce17805c344710ca5cde01edfdc72751311da8585375"}, + {file = "contourpy-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b2bdca22a27e35f16794cf585832e542123296b4687f9fd96822db6bae17bfc9"}, + {file = "contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509"}, + {file = "contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9"}, + {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b"}, + {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d"}, + {file = "contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e"}, + {file = "contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d"}, + {file = "contourpy-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a761d9ccfc5e2ecd1bf05534eda382aa14c3e4f9205ba5b1684ecfe400716ef2"}, + {file = "contourpy-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:523a8ee12edfa36f6d2a49407f705a6ef4c5098de4f498619787e272de93f2d5"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece6df05e2c41bd46776fbc712e0996f7c94e0d0543af1656956d150c4ca7c81"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:573abb30e0e05bf31ed067d2f82500ecfdaec15627a59d63ea2d95714790f5c2"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fa36448e6a3a1a9a2ba23c02012c43ed88905ec80163f2ffe2421c7192a5d7"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ea9924d28fc5586bf0b42d15f590b10c224117e74409dd7a0be3b62b74a501c"}, + {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b75aa69cb4d6f137b36f7eb2ace9280cfb60c55dc5f61c731fdf6f037f958a3"}, + {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:041b640d4ec01922083645a94bb3b2e777e6b626788f4095cf21abbe266413c1"}, + {file = "contourpy-1.3.1-cp313-cp313-win32.whl", hash = "sha256:36987a15e8ace5f58d4d5da9dca82d498c2bbb28dff6e5d04fbfcc35a9cb3a82"}, + {file = "contourpy-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7895f46d47671fa7ceec40f31fae721da51ad34bdca0bee83e38870b1f47ffd"}, + {file = "contourpy-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ddeb796389dadcd884c7eb07bd14ef12408aaae358f0e2ae24114d797eede30"}, + {file = "contourpy-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19c1555a6801c2f084c7ddc1c6e11f02eb6a6016ca1318dd5452ba3f613a1751"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841ad858cff65c2c04bf93875e384ccb82b654574a6d7f30453a04f04af71342"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4318af1c925fb9a4fb190559ef3eec206845f63e80fb603d47f2d6d67683901c"}, + {file = 
"contourpy-1.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14c102b0eab282427b662cb590f2e9340a9d91a1c297f48729431f2dcd16e14f"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e806338bfeaa006acbdeba0ad681a10be63b26e1b17317bfac3c5d98f36cda"}, + {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4d76d5993a34ef3df5181ba3c92fabb93f1eaa5729504fb03423fcd9f3177242"}, + {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:89785bb2a1980c1bd87f0cb1517a71cde374776a5f150936b82580ae6ead44a1"}, + {file = "contourpy-1.3.1-cp313-cp313t-win32.whl", hash = "sha256:8eb96e79b9f3dcadbad2a3891672f81cdcab7f95b27f28f1c67d75f045b6b4f1"}, + {file = "contourpy-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:287ccc248c9e0d0566934e7d606201abd74761b5703d804ff3df8935f523d546"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b457d6430833cee8e4b8e9b6f07aa1c161e5e0d52e118dc102c8f9bd7dd060d6"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb76c1a154b83991a3cbbf0dfeb26ec2833ad56f95540b442c73950af2013750"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:44a29502ca9c7b5ba389e620d44f2fbe792b1fb5734e8b931ad307071ec58c53"}, + {file = "contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699"}, ] [package.dependencies] @@ -1562,17 +1562,6 @@ files = [ {file = "dirtyjson-1.0.8.tar.gz", hash = "sha256:90ca4a18f3ff30ce849d100dcf4a003953c79d3a2348ef056f1d9c22231a25fd"}, ] -[[package]] -name = "diskcache" -version = "5.6.3" -description = "Disk Cache -- Disk and file backed persistent cache." 
-optional = false -python-versions = ">=3" -files = [ - {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, - {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, -] - [[package]] name = "distlib" version = "0.3.9" @@ -1736,13 +1725,13 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "faker" -version = "30.8.2" +version = "32.1.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-30.8.2-py3-none-any.whl", hash = "sha256:4a82b2908cd19f3bba1a4da2060cc4eb18a40410ccdf9350d071d79dc92fe3ce"}, - {file = "faker-30.8.2.tar.gz", hash = "sha256:aa31b52cdae3673d6a78b4857c7bcdc0e98f201a5cb77d7827fa9e6b5876da94"}, + {file = "Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814"}, + {file = "faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5"}, ] [package.dependencies] @@ -1762,13 +1751,13 @@ files = [ [[package]] name = "fastapi" -version = "0.115.4" +version = "0.115.5" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, - {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, + {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, + {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, ] [package.dependencies] @@ -1827,6 +1816,17 @@ docs = ["furo (>=2024.8.6)", 
"sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + [[package]] name = "flake8" version = "7.1.1" @@ -2263,13 +2263,13 @@ protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4 [[package]] name = "google-api-core" -version = "2.22.0" +version = "2.23.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_core-2.22.0-py3-none-any.whl", hash = "sha256:a6652b6bd51303902494998626653671703c420f6f4c88cfd3f50ed723e9d021"}, - {file = "google_api_core-2.22.0.tar.gz", hash = "sha256:26f8d76b96477db42b55fd02a33aae4a42ec8b86b98b94969b7333a2c828bf35"}, + {file = "google_api_core-2.23.0-py3-none-any.whl", hash = "sha256:c20100d4c4c41070cf365f1d8ddf5365915291b5eb11b83829fbd1c999b5122f"}, + {file = "google_api_core-2.23.0.tar.gz", hash = "sha256:2ceb087315e6af43f256704b871d99326b1f12a9d6ce99beaedec99ba26a0ace"}, ] [package.dependencies] @@ -2292,13 +2292,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.151.0" +version = "2.152.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.151.0-py2.py3-none-any.whl", hash = 
"sha256:4427b2f47cd88b0355d540c2c52215f68c337f3bc9d6aae1ceeae4525977504c"}, - {file = "google_api_python_client-2.151.0.tar.gz", hash = "sha256:a9d26d630810ed4631aea21d1de3e42072f98240aaf184a8a1a874a371115034"}, + {file = "google_api_python_client-2.152.0-py2.py3-none-any.whl", hash = "sha256:3e429b4584f6ccb620c1c4cdd43a6d78c5a8d307fc64860026aa26e203e8f932"}, + {file = "google_api_python_client-2.152.0.tar.gz", hash = "sha256:9593476dc548fcba794f2ea0e3cfbf927de5a7b79379781c1664637bf246f072"}, ] [package.dependencies] @@ -2366,13 +2366,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "google-cloud-aiplatform" -version = "1.71.1" +version = "1.72.0" description = "Vertex AI API client library" optional = false python-versions = ">=3.8" files = [ - {file = "google-cloud-aiplatform-1.71.1.tar.gz", hash = "sha256:0013527e06853382ff0885898195bb7f3cf4a70eb7e5d53e4b1a28c8bd1775e2"}, - {file = "google_cloud_aiplatform-1.71.1-py2.py3-none-any.whl", hash = "sha256:4cd49bbc7f8ad88b92029a090b834ebacf9efadc844226f1e74d015d68f69ef5"}, + {file = "google_cloud_aiplatform-1.72.0-py2.py3-none-any.whl", hash = "sha256:a75dbeda47eaecb7bb2b1801b9c8dfe72a14f76a649525cdff496646214a7afb"}, + {file = "google_cloud_aiplatform-1.72.0.tar.gz", hash = "sha256:50611d3d51ff92d80f866e5e0f145daac9d943499c6d715250a9947eca4774f2"}, ] [package.dependencies] @@ -2390,13 +2390,13 @@ shapely = "<3.0.0dev" [package.extras] autologging = ["mlflow (>=1.27.0,<=2.16.0)"] -cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] endpoint = ["requests (>=2.28.1)"] evaluation = ["pandas (>=1.0.0)", "tqdm (>=4.23.0)"] -full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", 
"google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] -langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "orjson (<=3.10.6)", "tenacity (<=8.3)"] -langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "orjson (<=3.10.6)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"] +full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "setuptools (<70.0.0)", 
"starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] +langchain = ["langchain (>=0.1.16,<0.4)", "langchain-core (<0.4)", "langchain-google-vertexai (<3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)"] +langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.4)", "langchain-core (<0.4)", "langchain-google-vertexai (<3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist"] lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] pipelines = ["pyyaml (>=5.3.1,<7)"] @@ -2405,21 +2405,21 @@ private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "setuptools (<70.0.0)"] ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "ray[train]", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"] -tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", 
"tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -testing = ["aiohttp", "bigframes", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] +tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +testing = ["aiohttp", "bigframes", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow 
(>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] tokenization = ["sentencepiece (>=0.2.0)"] vizier = ["google-vizier (>=0.1.6)"] xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] [[package]] name = "google-cloud-bigquery" -version = "3.26.0" +version = "3.27.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google_cloud_bigquery-3.26.0-py2.py3-none-any.whl", hash = "sha256:e0e9ad28afa67a18696e624cbccab284bf2c0a3f6eeb9eeb0426c69b943793a8"}, - {file = "google_cloud_bigquery-3.26.0.tar.gz", hash = "sha256:edbdc788beea659e04c0af7fe4dcd6d9155344b98951a0d5055bd2f15da4ba23"}, + {file = "google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3"}, + {file = "google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c"}, ] [package.dependencies] @@ -2586,13 +2586,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.65.0" +version = "1.66.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = 
"sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, + {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, + {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, ] [package.dependencies] @@ -3062,13 +3062,13 @@ files = [ [[package]] name = "identify" -version = "2.6.1" +version = "2.6.2" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, - {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, + {file = "identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3"}, + {file = "identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd"}, ] [package.extras] @@ -3266,22 +3266,22 @@ files = [ [[package]] name = "jedi" -version = "0.19.1" +version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." 
optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, ] [package.dependencies] -parso = ">=0.8.3,<0.9.0" +parso = ">=0.8.4,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jinja2" @@ -3302,84 +3302,84 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.7.0" +version = "0.7.1" description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.7.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e14027f61101b3f5e173095d9ecf95c1cac03ffe45a849279bde1d97e559e314"}, - {file = "jiter-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:979ec4711c2e37ac949561858bd42028884c9799516a923e1ff0b501ef341a4a"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:662d5d3cca58ad6af7a3c6226b641c8655de5beebcb686bfde0df0f21421aafa"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d89008fb47043a469f97ad90840b97ba54e7c3d62dc7cbb6cbf938bd0caf71d"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8b16c35c846a323ce9067170d5ab8c31ea3dbcab59c4f7608bbbf20c2c3b43f"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e82daaa1b0a68704f9029b81e664a5a9de3e466c2cbaabcda5875f961702e7"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43a87a9f586636e1f0dd3651a91f79b491ea0d9fd7cbbf4f5c463eebdc48bda7"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ec05b1615f96cc3e4901678bc863958611584072967d9962f9e571d60711d52"}, - {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5cb97e35370bde7aa0d232a7f910f5a0fbbc96bc0a7dbaa044fd5cd6bcd7ec3"}, - {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb316dacaf48c8c187cea75d0d7f835f299137e6fdd13f691dff8f92914015c7"}, - {file = "jiter-0.7.0-cp310-none-win32.whl", hash = "sha256:243f38eb4072763c54de95b14ad283610e0cd3bf26393870db04e520f60eebb3"}, - {file = "jiter-0.7.0-cp310-none-win_amd64.whl", hash = "sha256:2221d5603c139f6764c54e37e7c6960c469cbcd76928fb10d15023ba5903f94b"}, - {file = "jiter-0.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:91cec0ad755bd786c9f769ce8d843af955df6a8e56b17658771b2d5cb34a3ff8"}, - {file = "jiter-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:feba70a28a27d962e353e978dbb6afd798e711c04cb0b4c5e77e9d3779033a1a"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d866ec066c3616cacb8535dbda38bb1d470b17b25f0317c4540182bc886ce2"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7a7a00b6f9f18289dd563596f97ecaba6c777501a8ba04bf98e03087bcbc60"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aaf564094c7db8687f2660605e099f3d3e6ea5e7135498486674fcb78e29165"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4d27e09825c1b3c7a667adb500ce8b840e8fc9f630da8454b44cdd4fb0081bb"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca7c287da9c1d56dda88da1d08855a787dbb09a7e2bd13c66a2e288700bd7c7"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db19a6d160f093cbc8cd5ea2abad420b686f6c0e5fb4f7b41941ebc6a4f83cda"}, - {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e46a63c7f877cf7441ffc821c28287cfb9f533ae6ed707bde15e7d4dfafa7ae"}, - {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ba426fa7ff21cb119fa544b75dd3fbee6a70e55a5829709c0338d07ccd30e6d"}, - {file = "jiter-0.7.0-cp311-none-win32.whl", hash = "sha256:c07f55a64912b0c7982377831210836d2ea92b7bd343fca67a32212dd72e38e0"}, - {file = "jiter-0.7.0-cp311-none-win_amd64.whl", hash = "sha256:ed27b2c43e1b5f6c7fedc5c11d4d8bfa627de42d1143d87e39e2e83ddefd861a"}, - {file = "jiter-0.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac7930bcaaeb1e229e35c91c04ed2e9f39025b86ee9fc3141706bbf6fff4aeeb"}, - {file = "jiter-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:571feae3e7c901a8eedde9fd2865b0dfc1432fb15cab8c675a8444f7d11b7c5d"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8af4df8a262fa2778b68c2a03b6e9d1cb4d43d02bea6976d46be77a3a331af1"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd028d4165097a611eb0c7494d8c1f2aebd46f73ca3200f02a175a9c9a6f22f5"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6b487247c7836810091e9455efe56a52ec51bfa3a222237e1587d04d3e04527"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6d28a92f28814e1a9f2824dc11f4e17e1df1f44dc4fdeb94c5450d34bcb2602"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90443994bbafe134f0b34201dad3ebe1c769f0599004084e046fb249ad912425"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f9abf464f9faac652542ce8360cea8e68fba2b78350e8a170248f9bcc228702a"}, - {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db7a8d99fc5f842f7d2852f06ccaed066532292c41723e5dff670c339b649f88"}, - {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:15cf691ebd8693b70c94627d6b748f01e6d697d9a6e9f2bc310934fcfb7cf25e"}, - {file = "jiter-0.7.0-cp312-none-win32.whl", hash = "sha256:9dcd54fa422fb66ca398bec296fed5f58e756aa0589496011cfea2abb5be38a5"}, - {file = "jiter-0.7.0-cp312-none-win_amd64.whl", hash = "sha256:cc989951f73f9375b8eacd571baaa057f3d7d11b7ce6f67b9d54642e7475bfad"}, - {file = "jiter-0.7.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:24cecd18df540963cd27c08ca5ce1d0179f229ff78066d9eecbe5add29361340"}, - {file = "jiter-0.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d41b46236b90b043cca73785674c23d2a67d16f226394079d0953f94e765ed76"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:b160db0987171365c153e406a45dcab0ee613ae3508a77bfff42515cb4ce4d6e"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1c8d91e0f0bd78602eaa081332e8ee4f512c000716f5bc54e9a037306d693a7"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997706c683195eeff192d2e5285ce64d2a610414f37da3a3f2625dcf8517cf90"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ea52a8a0ff0229ab2920284079becd2bae0688d432fca94857ece83bb49c541"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d77449d2738cf74752bb35d75ee431af457e741124d1db5e112890023572c7c"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8203519907a1d81d6cb00902c98e27c2d0bf25ce0323c50ca594d30f5f1fbcf"}, - {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41d15ccc53931c822dd7f1aebf09faa3cda2d7b48a76ef304c7dbc19d1302e51"}, - {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:febf3179b2fabf71fbd2fd52acb8594163bb173348b388649567a548f356dbf6"}, - {file = "jiter-0.7.0-cp313-none-win32.whl", hash = "sha256:4a8e2d866e7eda19f012444e01b55079d8e1c4c30346aaac4b97e80c54e2d6d3"}, - {file = "jiter-0.7.0-cp313-none-win_amd64.whl", hash = "sha256:7417c2b928062c496f381fb0cb50412eee5ad1d8b53dbc0e011ce45bb2de522c"}, - {file = "jiter-0.7.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9c62c737b5368e51e74960a08fe1adc807bd270227291daede78db24d5fbf556"}, - {file = "jiter-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e4640722b1bef0f6e342fe4606aafaae0eb4f4be5c84355bb6867f34400f6688"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f367488c3b9453eab285424c61098faa1cab37bb49425e69c8dca34f2dfe7d69"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash 
= "sha256:0cf5d42beb3514236459454e3287db53d9c4d56c4ebaa3e9d0efe81b19495129"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc5190ea1113ee6f7252fa8a5fe5a6515422e378356c950a03bbde5cafbdbaab"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63ee47a149d698796a87abe445fc8dee21ed880f09469700c76c8d84e0d11efd"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48592c26ea72d3e71aa4bea0a93454df907d80638c3046bb0705507b6704c0d7"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:79fef541199bd91cfe8a74529ecccb8eaf1aca38ad899ea582ebbd4854af1e51"}, - {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d1ef6bb66041f2514739240568136c81b9dcc64fd14a43691c17ea793b6535c0"}, - {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca4d950863b1c238e315bf159466e064c98743eef3bd0ff9617e48ff63a4715"}, - {file = "jiter-0.7.0-cp38-none-win32.whl", hash = "sha256:897745f230350dcedb8d1ebe53e33568d48ea122c25e6784402b6e4e88169be7"}, - {file = "jiter-0.7.0-cp38-none-win_amd64.whl", hash = "sha256:b928c76a422ef3d0c85c5e98c498ce3421b313c5246199541e125b52953e1bc0"}, - {file = "jiter-0.7.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c9b669ff6f8ba08270dee9ccf858d3b0203b42314a428a1676762f2d390fbb64"}, - {file = "jiter-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5be919bacd73ca93801c3042bce6e95cb9c555a45ca83617b9b6c89df03b9c2"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a282e1e8a396dabcea82d64f9d05acf7efcf81ecdd925b967020dcb0e671c103"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:17ecb1a578a56e97a043c72b463776b5ea30343125308f667fb8fce4b3796735"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7b6045fa0527129218cdcd8a8b839f678219686055f31ebab35f87d354d9c36e"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:189cc4262a92e33c19d4fd24018f5890e4e6da5b2581f0059938877943f8298c"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c138414839effbf30d185e30475c6dc8a16411a1e3681e5fd4605ab1233ac67a"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2791604acef33da6b72d5ecf885a32384bcaf9aa1e4be32737f3b8b9588eef6a"}, - {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae60ec89037a78d60bbf3d8b127f1567769c8fa24886e0abed3f622791dea478"}, - {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:836f03dea312967635233d826f783309b98cfd9ccc76ac776e224cfcef577862"}, - {file = "jiter-0.7.0-cp39-none-win32.whl", hash = "sha256:ebc30ae2ce4bc4986e1764c404b4ea1924f926abf02ce92516485098f8545374"}, - {file = "jiter-0.7.0-cp39-none-win_amd64.whl", hash = "sha256:abf596f951370c648f37aa9899deab296c42a3829736e598b0dd10b08f77a44d"}, - {file = "jiter-0.7.0.tar.gz", hash = "sha256:c061d9738535497b5509f8970584f20de1e900806b239a39a9994fc191dad630"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:262e96d06696b673fad6f257e6a0abb6e873dc22818ca0e0600f4a1189eb334f"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be6de02939aac5be97eb437f45cfd279b1dc9de358b13ea6e040e63a3221c40d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935f10b802bc1ce2b2f61843e498c7720aa7f4e4bb7797aa8121eab017293c3d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9cd3cccccabf5064e4bb3099c87bf67db94f805c1e62d1aefd2b7476e90e0ee2"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4aa919ebfc5f7b027cc368fe3964c0015e1963b92e1db382419dadb098a05192"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae2d01e82c94491ce4d6f461a837f63b6c4e6dd5bb082553a70c509034ff3d4"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f9568cd66dbbdab67ae1b4c99f3f7da1228c5682d65913e3f5f95586b3cb9a9"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ecbf4e20ec2c26512736284dc1a3f8ed79b6ca7188e3b99032757ad48db97dc"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1a0508fddc70ce00b872e463b387d49308ef02b0787992ca471c8d4ba1c0fa1"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f84c9996664c460f24213ff1e5881530abd8fafd82058d39af3682d5fd2d6316"}, + {file = "jiter-0.7.1-cp310-none-win32.whl", hash = "sha256:c915e1a1960976ba4dfe06551ea87063b2d5b4d30759012210099e712a414d9f"}, + {file = "jiter-0.7.1-cp310-none-win_amd64.whl", hash = "sha256:75bf3b7fdc5c0faa6ffffcf8028a1f974d126bac86d96490d1b51b3210aa0f3f"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ad04a23a91f3d10d69d6c87a5f4471b61c2c5cd6e112e85136594a02043f462c"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e47a554de88dff701226bb5722b7f1b6bccd0b98f1748459b7e56acac2707a5"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e44fff69c814a2e96a20b4ecee3e2365e9b15cf5fe4e00869d18396daa91dab"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df0a1d05081541b45743c965436f8b5a1048d6fd726e4a030113a2699a6046ea"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f22cf8f236a645cb6d8ffe2a64edb5d2b66fb148bf7c75eea0cb36d17014a7bc"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:da8589f50b728ea4bf22e0632eefa125c8aa9c38ed202a5ee6ca371f05eeb3ff"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f20de711224f2ca2dbb166a8d512f6ff48c9c38cc06b51f796520eb4722cc2ce"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a9803396032117b85ec8cbf008a54590644a062fedd0425cbdb95e4b2b60479"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3d8bae77c82741032e9d89a4026479061aba6e646de3bf5f2fc1ae2bbd9d06e0"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3dc9939e576bbc68c813fc82f6620353ed68c194c7bcf3d58dc822591ec12490"}, + {file = "jiter-0.7.1-cp311-none-win32.whl", hash = "sha256:f7605d24cd6fab156ec89e7924578e21604feee9c4f1e9da34d8b67f63e54892"}, + {file = "jiter-0.7.1-cp311-none-win_amd64.whl", hash = "sha256:f3ea649e7751a1a29ea5ecc03c4ada0a833846c59c6da75d747899f9b48b7282"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ad36a1155cbd92e7a084a568f7dc6023497df781adf2390c345dd77a120905ca"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7ba52e6aaed2dc5c81a3d9b5e4ab95b039c4592c66ac973879ba57c3506492bb"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b7de0b6f6728b678540c7927587e23f715284596724be203af952418acb8a2d"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9463b62bd53c2fb85529c700c6a3beb2ee54fde8bef714b150601616dcb184a6"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:627164ec01d28af56e1f549da84caf0fe06da3880ebc7b7ee1ca15df106ae172"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25d0e5bf64e368b0aa9e0a559c3ab2f9b67e35fe7269e8a0d81f48bbd10e8963"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:c244261306f08f8008b3087059601997016549cb8bb23cf4317a4827f07b7d74"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ded4e4b75b68b843b7cea5cd7c55f738c20e1394c68c2cb10adb655526c5f1b"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:80dae4f1889b9d09e5f4de6b58c490d9c8ce7730e35e0b8643ab62b1538f095c"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5970cf8ec943b51bce7f4b98d2e1ed3ada170c2a789e2db3cb484486591a176a"}, + {file = "jiter-0.7.1-cp312-none-win32.whl", hash = "sha256:701d90220d6ecb3125d46853c8ca8a5bc158de8c49af60fd706475a49fee157e"}, + {file = "jiter-0.7.1-cp312-none-win_amd64.whl", hash = "sha256:7824c3ecf9ecf3321c37f4e4d4411aad49c666ee5bc2a937071bdd80917e4533"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:097676a37778ba3c80cb53f34abd6943ceb0848263c21bf423ae98b090f6c6ba"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3298af506d4271257c0a8f48668b0f47048d69351675dd8500f22420d4eec378"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12fd88cfe6067e2199964839c19bd2b422ca3fd792949b8f44bb8a4e7d21946a"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dacca921efcd21939123c8ea8883a54b9fa7f6545c8019ffcf4f762985b6d0c8"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de3674a5fe1f6713a746d25ad9c32cd32fadc824e64b9d6159b3b34fd9134143"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65df9dbae6d67e0788a05b4bad5706ad40f6f911e0137eb416b9eead6ba6f044"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ba9a358d59a0a55cccaa4957e6ae10b1a25ffdabda863c0343c51817610501d"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:576eb0f0c6207e9ede2b11ec01d9c2182973986514f9c60bc3b3b5d5798c8f50"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e550e29cdf3577d2c970a18f3959e6b8646fd60ef1b0507e5947dc73703b5627"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:81d968dbf3ce0db2e0e4dec6b0a0d5d94f846ee84caf779b07cab49f5325ae43"}, + {file = "jiter-0.7.1-cp313-none-win32.whl", hash = "sha256:f892e547e6e79a1506eb571a676cf2f480a4533675f834e9ae98de84f9b941ac"}, + {file = "jiter-0.7.1-cp313-none-win_amd64.whl", hash = "sha256:0302f0940b1455b2a7fb0409b8d5b31183db70d2b07fd177906d83bf941385d1"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c65a3ce72b679958b79d556473f192a4dfc5895e8cc1030c9f4e434690906076"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e80052d3db39f9bb8eb86d207a1be3d9ecee5e05fdec31380817f9609ad38e60"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a497859c4f3f7acd71c8bd89a6f9cf753ebacacf5e3e799138b8e1843084e3"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1288bc22b9e36854a0536ba83666c3b1fb066b811019d7b682c9cf0269cdf9f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b096ca72dd38ef35675e1d3b01785874315182243ef7aea9752cb62266ad516f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbd52c50b605af13dbee1a08373c520e6fcc6b5d32f17738875847fea4e2cd"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af29c5c6eb2517e71ffa15c7ae9509fa5e833ec2a99319ac88cc271eca865519"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f114a4df1e40c03c0efbf974b376ed57756a1141eb27d04baee0680c5af3d424"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:191fbaee7cf46a9dd9b817547bf556facde50f83199d07fc48ebeff4082f9df4"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e2b445e5ee627fb4ee6bbceeb486251e60a0c881a8e12398dfdff47c56f0723"}, + {file = "jiter-0.7.1-cp38-none-win32.whl", hash = "sha256:47ac4c3cf8135c83e64755b7276339b26cd3c7ddadf9e67306ace4832b283edf"}, + {file = "jiter-0.7.1-cp38-none-win_amd64.whl", hash = "sha256:60b49c245cd90cde4794f5c30f123ee06ccf42fb8730a019a2870cd005653ebd"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8f212eeacc7203256f526f550d105d8efa24605828382cd7d296b703181ff11d"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9e247079d88c00e75e297e6cb3a18a039ebcd79fefc43be9ba4eb7fb43eb726"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0aacaa56360139c53dcf352992b0331f4057a0373bbffd43f64ba0c32d2d155"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc1b55314ca97dbb6c48d9144323896e9c1a25d41c65bcb9550b3e0c270ca560"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f281aae41b47e90deb70e7386558e877a8e62e1693e0086f37d015fa1c102289"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93c20d2730a84d43f7c0b6fb2579dc54335db742a59cf9776d0b80e99d587382"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e81ccccd8069110e150613496deafa10da2f6ff322a707cbec2b0d52a87b9671"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a7d5e85766eff4c9be481d77e2226b4c259999cb6862ccac5ef6621d3c8dcce"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f52ce5799df5b6975439ecb16b1e879d7655e1685b6e3758c9b1b97696313bfb"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:e0c91a0304373fdf97d56f88356a010bba442e6d995eb7773cbe32885b71cdd8"}, + {file = "jiter-0.7.1-cp39-none-win32.whl", hash = "sha256:5c08adf93e41ce2755970e8aa95262298afe2bf58897fb9653c47cd93c3c6cdc"}, + {file = "jiter-0.7.1-cp39-none-win_amd64.whl", hash = "sha256:6592f4067c74176e5f369228fb2995ed01400c9e8e1225fb73417183a5e635f0"}, + {file = "jiter-0.7.1.tar.gz", hash = "sha256:448cf4f74f7363c34cdef26214da527e8eeffd88ba06d0b80b485ad0667baf5d"}, ] [[package]] @@ -3417,15 +3417,18 @@ files = [ [[package]] name = "json5" -version = "0.9.25" +version = "0.9.28" description = "A Python implementation of the JSON5 data format." optional = false -python-versions = ">=3.8" +python-versions = ">=3.8.0" files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, + {file = "json5-0.9.28-py3-none-any.whl", hash = "sha256:29c56f1accdd8bc2e037321237662034a7e07921e2b7223281a5ce2c46f0c4df"}, + {file = "json5-0.9.28.tar.gz", hash = "sha256:1f82f36e615bc5b42f1bbd49dbc94b12563c56408c6ffa06414ea310890e9a6e"}, ] +[package.extras] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.3)", "mypy (==1.13.0)", "pip (==24.3.1)", "pylint (==3.2.3)", "ruff (==0.7.3)", "twine (==5.1.1)", "uv (==0.5.1)"] + [[package]] name = "jsonpointer" version = "3.0.0" @@ -3945,13 +3948,13 @@ types-tqdm = "*" [[package]] name = "litellm" -version = "1.52.5" +version = "1.52.6" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.52.5-py3-none-any.whl", hash = "sha256:38c0f30a849b80c99cfc56f96c4c7563d5ced83f08fd7fc2129011ddc4414ac5"}, - {file = "litellm-1.52.5.tar.gz", hash = "sha256:9708c02983c7ed22fc18c96e167bf1c4ed9672de397d413e7957c216dfc911e6"}, + {file = 
"litellm-1.52.6-py3-none-any.whl", hash = "sha256:9b3e9fb51f7e2a3cc8b50997b346c55aae9435a138d9a656f18e262750a1bfe1"}, + {file = "litellm-1.52.6.tar.gz", hash = "sha256:d67c653f97bd07f503b975c167de1e25632b7bc6bb3c008c46921e4acc81ec60"}, ] [package.dependencies] @@ -3973,13 +3976,13 @@ proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", " [[package]] name = "llama-cloud" -version = "0.1.4" +version = "0.1.5" description = "" optional = false python-versions = "<4,>=3.8" files = [ - {file = "llama_cloud-0.1.4-py3-none-any.whl", hash = "sha256:cfca6c4e0a87468b922d732f0f313a2ecd3a8e0bf74382ee80829ce49dcbc5e0"}, - {file = "llama_cloud-0.1.4.tar.gz", hash = "sha256:6f0155979bd96160951cb812c48836f1face037bc79ccfd8d185b18ef4c9faf8"}, + {file = "llama_cloud-0.1.5-py3-none-any.whl", hash = "sha256:15605022520d04bd6ef6a46c0cbde833f301d652286d34fca02b4c44e2a7a2aa"}, + {file = "llama_cloud-0.1.5.tar.gz", hash = "sha256:8ce1db36754a6a46c8511561dbc040a2e89ba4ca1cf4edfb6ce382a5240f6cb6"}, ] [package.dependencies] @@ -3988,19 +3991,19 @@ pydantic = ">=1.10" [[package]] name = "llama-index" -version = "0.11.22" +version = "0.11.23" description = "Interface between LLMs and your data" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index-0.11.22-py3-none-any.whl", hash = "sha256:bda98d925dfbab4b76c07cc61b59bb5920e15e685efd9fbf3a0cd33f1f465f10"}, - {file = "llama_index-0.11.22.tar.gz", hash = "sha256:8d8a7838a7fcc733fc7a262ef3709df001c3021cb42843c8e9da8d244e5355e1"}, + {file = "llama_index-0.11.23-py3-none-any.whl", hash = "sha256:08513b86fed8e4295b52dc204c193e7186a2e65fc09d581deeb6a74afaaf74fe"}, + {file = "llama_index-0.11.23.tar.gz", hash = "sha256:e02118dfe060568cf40c9ed109c16d559911b2e19eac5e6babc04ed6b57ede02"}, ] [package.dependencies] llama-index-agent-openai = ">=0.3.4,<0.4.0" llama-index-cli = ">=0.3.1,<0.4.0" -llama-index-core = ">=0.11.22,<0.12.0" +llama-index-core = ">=0.11.23,<0.12.0" llama-index-embeddings-openai = 
">=0.2.4,<0.3.0" llama-index-indices-managed-llama-cloud = ">=0.3.0" llama-index-legacy = ">=0.9.48,<0.10.0" @@ -4008,7 +4011,7 @@ llama-index-llms-openai = ">=0.2.10,<0.3.0" llama-index-multi-modal-llms-openai = ">=0.2.0,<0.3.0" llama-index-program-openai = ">=0.2.0,<0.3.0" llama-index-question-gen-openai = ">=0.2.0,<0.3.0" -llama-index-readers-file = ">=0.2.0,<0.3.0" +llama-index-readers-file = ">=0.3.0,<0.4.0" llama-index-readers-llama-parse = ">=0.3.0" nltk = ">3.8.1" @@ -4046,13 +4049,13 @@ llama-index-llms-openai = ">=0.2.0,<0.3.0" [[package]] name = "llama-index-core" -version = "0.11.22" +version = "0.11.23" description = "Interface between LLMs and your data" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_core-0.11.22-py3-none-any.whl", hash = "sha256:5c59d95dec9bb0727f25b03de89392c69076b2e4aaa6acbd8773de1f07502e9e"}, - {file = "llama_index_core-0.11.22.tar.gz", hash = "sha256:ddc30b9c873495de40ad8278d0c894ba09f32f6aa7fc638012b1b22b74c32553"}, + {file = "llama_index_core-0.11.23-py3-none-any.whl", hash = "sha256:25a0cb4a055bfb348655ca4acd1b475529bd8537a7b81874ef14ed13f56e06c1"}, + {file = "llama_index_core-0.11.23.tar.gz", hash = "sha256:e150859696a0eae169fe19323f46e9a31af2c12c3182012e4d0353ea8eb06d24"}, ] [package.dependencies] @@ -4060,6 +4063,7 @@ aiohttp = ">=3.8.6,<4.0.0" dataclasses-json = "*" deprecated = ">=1.2.9.3" dirtyjson = ">=1.0.8,<2.0.0" +filetype = ">=1.2.0,<2.0.0" fsspec = ">=2023.5.0" httpx = "*" nest-asyncio = ">=1.5.8,<2.0.0" @@ -4142,17 +4146,17 @@ openai = ">=1.1.0" [[package]] name = "llama-index-indices-managed-llama-cloud" -version = "0.4.0" +version = "0.4.2" description = "llama-index indices llama-cloud integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_indices_managed_llama_cloud-0.4.0-py3-none-any.whl", hash = "sha256:c2c54821f1bf17a7810e6c013fbe7ddfef4154b7e5b100f7bf8673098f8004e4"}, - {file = 
"llama_index_indices_managed_llama_cloud-0.4.0.tar.gz", hash = "sha256:fbebff7876a219b6ab96892ae7c432a9299195fab8f67d4a4a0ebf6da210b242"}, + {file = "llama_index_indices_managed_llama_cloud-0.4.2-py3-none-any.whl", hash = "sha256:def86ba39810248f07dc006fd373a3a2eede5249f358eee0cb2351f9faa63243"}, + {file = "llama_index_indices_managed_llama_cloud-0.4.2.tar.gz", hash = "sha256:4e27b8f825aff04f80776e915a9f2c238e2f35c7bc72684185279161d051a3f9"}, ] [package.dependencies] -llama-cloud = ">=0.0.11" +llama-cloud = ">=0.1.5" llama-index-core = ">=0.11.13.post1,<0.12.0" [[package]] @@ -4275,20 +4279,20 @@ llama-index-program-openai = ">=0.2.0,<0.3.0" [[package]] name = "llama-index-readers-file" -version = "0.2.2" +version = "0.3.0" description = "llama-index readers file integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_readers_file-0.2.2-py3-none-any.whl", hash = "sha256:ffec878771c1e7575afb742887561059bcca77b97a81c1c1be310ebb73f10f46"}, - {file = "llama_index_readers_file-0.2.2.tar.gz", hash = "sha256:48459f90960b863737147b66ed83afec9ce8984f8eda2561b6d2500214365db2"}, + {file = "llama_index_readers_file-0.3.0-py3-none-any.whl", hash = "sha256:259ee195ea8285f8421b381ec3a64f5fae68de69435970ae64e544f07a4403ad"}, + {file = "llama_index_readers_file-0.3.0.tar.gz", hash = "sha256:6c675fcd2f0599a131f89e1c5ed3521dde31338a9b724a7721f5dfd7243ea8d8"}, ] [package.dependencies] beautifulsoup4 = ">=4.12.3,<5.0.0" llama-index-core = ">=0.11.0,<0.12.0" pandas = "*" -pypdf = ">=4.0.1,<5.0.0" +pypdf = ">=5.1.0,<6.0.0" striprtf = ">=0.0.26,<0.0.27" [package.extras] @@ -4326,13 +4330,13 @@ llama-index-core = ">=0.11.0,<0.12.0" [[package]] name = "llama-parse" -version = "0.5.13" +version = "0.5.14" description = "Parse files into RAG-Optimized formats." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_parse-0.5.13-py3-none-any.whl", hash = "sha256:d7f14549a5a6a5944f06372a7244c7683209a4a946a41844467a1d6eb5bbf066"}, - {file = "llama_parse-0.5.13.tar.gz", hash = "sha256:80fe0e6f184ca4fae642d15ccfa927771c04f518bd160b084ed789201469e768"}, + {file = "llama_parse-0.5.14-py3-none-any.whl", hash = "sha256:64a46825598a239cd7066df7bd81f1f952ae7b3e26ad98f0e34aa9625ebcfd72"}, + {file = "llama_parse-0.5.14.tar.gz", hash = "sha256:b90510f58774d6ee73b6275fe4f1aa0ff3a306026df29959606e56073384248c"}, ] [package.dependencies] @@ -5126,13 +5130,13 @@ files = [ [[package]] name = "narwhals" -version = "1.13.3" +version = "1.13.5" description = "Extremely lightweight compatibility layer between dataframe libraries" optional = false python-versions = ">=3.8" files = [ - {file = "narwhals-1.13.3-py3-none-any.whl", hash = "sha256:cde49b59b4540885d822777b747ed3fad65632b3d34648040308afcf08e62547"}, - {file = "narwhals-1.13.3.tar.gz", hash = "sha256:db95cb5b5a6b99bad9fe7f2e2dacf937d57dee1c76c4544d4354a324084e36b5"}, + {file = "narwhals-1.13.5-py3-none-any.whl", hash = "sha256:91fe95ffdece9e3837780b6cd32f4309a41f39b285bc9d42d60eaff47d48b39a"}, + {file = "narwhals-1.13.5.tar.gz", hash = "sha256:2e71b70895759af455a83583052bb9dbada9f72efad786d8d1b2f38078054e73"}, ] [package.extras] @@ -5599,13 +5603,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.54.3" +version = "1.54.4" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" files = [ - {file = "openai-1.54.3-py3-none-any.whl", hash = "sha256:f18dbaf09c50d70c4185b892a2a553f80681d1d866323a2da7f7be2f688615d5"}, - {file = "openai-1.54.3.tar.gz", hash = "sha256:7511b74eeb894ac0b0253dc71f087a15d2e4d71d22d0088767205143d880cca6"}, + {file = "openai-1.54.4-py3-none-any.whl", hash = "sha256:0d95cef99346bf9b6d7fbf57faf61a673924c3e34fa8af84c9ffe04660673a7e"}, + {file = "openai-1.54.4.tar.gz", hash = 
"sha256:50f3656e45401c54e973fa05dc29f3f0b0d19348d685b2f7ddb4d92bf7b1b6bf"}, ] [package.dependencies] @@ -5640,28 +5644,6 @@ files = [ [package.dependencies] numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} -[[package]] -name = "openhands-aci" -version = "0.1.0" -description = "An Agent-Computer Interface (ACI) designed for software development agents OpenHands." -optional = false -python-versions = "<4.0,>=3.12" -files = [ - {file = "openhands_aci-0.1.0-py3-none-any.whl", hash = "sha256:f28e5a32e394d1e643f79bf8af27fe44d039cb71729d590f9f3ee0c23c075f00"}, - {file = "openhands_aci-0.1.0.tar.gz", hash = "sha256:babc55f516efbb27eb7e528662e14b75c902965c48a110408fda824b83ea4461"}, -] - -[package.dependencies] -diskcache = ">=5.6.3,<6.0.0" -gitpython = "*" -grep-ast = "0.3.3" -litellm = "*" -networkx = "*" -numpy = "*" -pandas = "*" -scipy = "*" -tree-sitter = "0.21.3" - [[package]] name = "opentelemetry-api" version = "1.25.0" @@ -6545,7 +6527,6 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] @@ -6556,7 +6537,6 @@ description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] @@ -6811,25 +6791,6 @@ files = [ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] -[[package]] -name = "pygithub" -version = "2.5.0" -description = "Use the full Github API v3" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"}, - {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"}, -] - -[package.dependencies] -Deprecated = "*" -pyjwt = {version = ">=2.4.0", extras = ["crypto"]} -pynacl = ">=1.4.0" -requests = ">=2.14.0" -typing-extensions = ">=4.0.0" -urllib3 = ">=1.26.0" - [[package]] name = "pygments" version = "2.18.0" @@ -6894,32 +6855,6 @@ files = [ [package.dependencies] pybind11 = ">=2.2" -[[package]] -name = "pynacl" -version = "1.5.0" -description = "Python binding to the Networking and Cryptography (NaCl) library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, - {file = 
"PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, - {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, -] - -[package.dependencies] -cffi = ">=1.4.1" - -[package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] - [[package]] name = "pyparsing" version = "3.2.0" @@ -6936,20 +6871,21 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "4.3.1" +version = "5.1.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418"}, - {file = "pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b"}, + {file = "pypdf-5.1.0-py3-none-any.whl", hash = "sha256:3bd4f503f4ebc58bae40d81e81a9176c400cbbac2ba2d877367595fb524dfdfc"}, + {file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"}, ] [package.extras] -crypto = ["PyCryptodome", "cryptography"] +crypto = ["cryptography"] +cryptodome = ["PyCryptodome"] dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +full = ["Pillow (>=8.0.0)", "cryptography"] image = ["Pillow (>=8.0.0)"] [[package]] @@ -7151,6 +7087,25 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-engineio" +version = "4.10.1" 
+description = "Engine.IO server and client for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python_engineio-4.10.1-py3-none-any.whl", hash = "sha256:445a94004ec8034960ab99e7ce4209ec619c6e6b6a12aedcb05abeab924025c0"}, + {file = "python_engineio-4.10.1.tar.gz", hash = "sha256:166cea8dd7429638c5c4e3a4895beae95196e860bc6f29ed0b9fe753d1ef2072"}, +] + +[package.dependencies] +simple-websocket = ">=0.10.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + [[package]] name = "python-frontmatter" version = "1.1.0" @@ -7208,6 +7163,26 @@ Pillow = ">=3.3.2" typing-extensions = ">=4.9.0" XlsxWriter = ">=0.5.7" +[[package]] +name = "python-socketio" +version = "5.11.4" +description = "Socket.IO server and client for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_socketio-5.11.4-py3-none-any.whl", hash = "sha256:42efaa3e3e0b166fc72a527488a13caaac2cefc76174252486503bd496284945"}, + {file = "python_socketio-5.11.4.tar.gz", hash = "sha256:8b0b8ff2964b2957c865835e936310190639c00310a47d77321a594d1665355e"}, +] + +[package.dependencies] +bidict = ">=0.21.0" +python-engineio = ">=4.8.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + [[package]] name = "pytz" version = "2024.2" @@ -7820,25 +7795,6 @@ files = [ {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, ] -[[package]] -name = "runloop-api-client" -version = "0.7.0" -description = "The official Python library for the runloop API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "runloop_api_client-0.7.0-py3-none-any.whl", hash = "sha256:3c3744e212fedeb36a12d5164e241152f5a8c8c5b59cbade39b81ec36a7d0905"}, - {file = "runloop_api_client-0.7.0.tar.gz", hash = 
"sha256:d1c2373775f426460665a2c0d6fc150f6525ac9ccceeff534fb1c554d1de2353"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -typing-extensions = ">=4.7,<5" - [[package]] name = "s3transfer" version = "0.10.3" @@ -8073,11 +8029,6 @@ files = [ {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, - {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, @@ -8190,13 +8141,13 @@ win32 = ["pywin32"] [[package]] name = "sentence-transformers" -version = "3.2.1" +version = "3.3.0" description = "State-of-the-Art Text Embeddings" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sentence_transformers-3.2.1-py3-none-any.whl", hash = "sha256:c507e069eea33d15f1f2c72f74d7ea93abef298152cc235ab5af5e3a7584f738"}, - {file = "sentence_transformers-3.2.1.tar.gz", hash = "sha256:9fc38e620e5e1beba31d538a451778c9ccdbad77119d90f59f5bce49c4148e79"}, + {file = "sentence_transformers-3.3.0-py3-none-any.whl", hash = "sha256:5897c376fde1fea5f22a90ead2612278a464e52b8e42f1af95f84092c36bc23c"}, + {file = "sentence_transformers-3.3.0.tar.gz", hash = "sha256:b91f0aea4ada72ed5a7cdbe8a6245a7152d0d9f84f336383778f8568e406b008"}, ] [package.dependencies] @@ -8209,7 +8160,7 @@ tqdm = "*" transformers = ">=4.41.0,<5.0.0" [package.extras] -dev = ["accelerate (>=0.20.3)", "datasets", "pre-commit", "pytest", "pytest-cov"] +dev = ["accelerate (>=0.20.3)", "datasets", "peft", "pre-commit", "pytest", "pytest-cov"] onnx = ["optimum[onnxruntime] (>=1.23.1)"] onnx-gpu = ["optimum[onnxruntime-gpu] (>=1.23.1)"] openvino = ["optimum-intel[openvino] (>=1.20.0)"] @@ -8217,23 +8168,23 @@ train = ["accelerate (>=0.20.3)", "datasets"] [[package]] name = "setuptools" -version = "75.3.0" +version = "75.5.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829"}, + 
{file = "setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", 
"pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "shapely" @@ -8322,6 +8273,24 @@ attrs = "*" test = ["coverage", "mock", "repeated-test (>=2.2.1)", "sphinx"] tests = ["coverage", "mock", "repeated-test (>=2.2.1)", "sphinx"] +[[package]] +name = "simple-websocket" +version = "1.1.0" +description = "Simple WebSocket server and client for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c"}, + {file = "simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4"}, +] + +[package.dependencies] +wsproto = "*" + +[package.extras] +dev = ["flake8", "pytest", "pytest-cov", "tox"] +docs = ["sphinx"] + [[package]] name = "six" version = "1.16.0" @@ -8499,13 +8468,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "streamlit" -version = "1.40.0" +version = "1.40.1" description = "A faster way to build and share data apps" optional = false python-versions = "!=3.9.7,>=3.8" files = [ - {file = "streamlit-1.40.0-py2.py3-none-any.whl", hash = "sha256:05d22bc111d682ef4deaf7ededeec2305051b99dd6d7d564788705e4ce6f8029"}, - {file = "streamlit-1.40.0.tar.gz", hash = "sha256:6e4d3b90c4934951f97d790daf7953df5beb2916e447ac9f78e1b76a9ef83327"}, + {file = "streamlit-1.40.1-py2.py3-none-any.whl", hash = "sha256:b9d7a317a0cc88edd7857c7e07dde9cf95647d3ae51cbfa8a3db82fbb8a2990d"}, + {file = "streamlit-1.40.1.tar.gz", hash = "sha256:1f2b09f04b6ad366a2c7b4d48104697d1c8bc33f48bdf7ed939cc04c12d3aec6"}, ] [package.dependencies] @@ -8527,7 +8496,7 @@ tenacity = 
">=8.1.0,<10" toml = ">=0.10.1,<2" tornado = ">=6.0.3,<7" typing-extensions = ">=4.3.0,<5" -watchdog = {version = ">=2.1.5,<6", markers = "platform_system != \"Darwin\""} +watchdog = {version = ">=2.1.5,<7", markers = "platform_system != \"Darwin\""} [package.extras] snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python[modin] (>=1.17.0)"] @@ -9488,41 +9457,41 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "5.0.3" +version = "6.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" files = [ - {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"}, - {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"}, - {file = "watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"}, - {file = 
"watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:752fb40efc7cc8d88ebc332b8f4bcbe2b5cc7e881bccfeb8e25054c00c994ee3"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2e8f3f955d68471fa37b0e3add18500790d129cc7efe89971b8a4cc6fdeb0b2"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b8ca4d854adcf480bdfd80f46fdd6fb49f91dd020ae11c89b3a79e19454ec627"}, - {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"}, - {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"}, - {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:223160bb359281bb8e31c8f1068bf71a6b16a8ad3d9524ca6f523ac666bb6a1e"}, - {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:560135542c91eaa74247a2e8430cf83c4342b29e8ad4f520ae14f0c8a19cfb5b"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = 
"sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"}, - {file = "watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"}, - {file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"}, - {file = "watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"}, - {file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, 
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, + {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, + {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, + {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, + {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, ] [package.extras] @@ -9636,19 +9605,15 @@ files = [ [[package]] name = "webcolors" -version = "24.8.0" +version = "24.11.1" description = "A library for working with the color formats defined by HTML and CSS." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, - {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, + {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, + {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, ] -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["coverage[toml]"] - [[package]] name = "webencodings" version = "0.5.1" @@ -9678,97 +9643,80 @@ test = ["websockets"] [[package]] name = "websockets" -version = "13.1" +version = "14.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, - {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, - {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, - {file = 
"websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, - {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, - {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, - {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, - {file = 
"websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, - {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, - {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, - {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, - {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, - {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, - {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, - {file = 
"websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, - {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, - {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, - {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, - {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, - {file = 
"websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, - {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, - {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, - {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, - {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, - {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, - {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, - {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, - {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, + {file = "websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29"}, + {file = "websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179"}, + {file = "websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434"}, + {file = "websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10"}, + {file = "websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e"}, + {file = "websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512"}, + {file = "websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac"}, + {file = "websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23"}, + {file = "websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e"}, + {file = "websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09"}, + {file = "websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed"}, + {file = "websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d"}, + {file = "websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058"}, + {file = 
"websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05"}, + {file = "websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0"}, + {file = "websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f"}, + {file = "websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9"}, + {file = "websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b"}, + {file = "websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a"}, + {file = "websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6"}, + {file = "websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0"}, + {file = "websockets-14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01bb2d4f0a6d04538d3c5dfd27c0643269656c28045a53439cbf1c004f90897a"}, + {file = "websockets-14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:414ffe86f4d6f434a8c3b7913655a1a5383b617f9bf38720e7c0799fac3ab1c6"}, + {file = "websockets-14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fda642151d5affdee8a430bd85496f2e2517be3a2b9d2484d633d5712b15c56"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd7c11968bc3860d5c78577f0dbc535257ccec41750675d58d8dc66aa47fe52c"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a032855dc7db987dff813583d04f4950d14326665d7e714d584560b140ae6b8b"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e7ea2f782408c32d86b87a0d2c1fd8871b0399dd762364c731d86c86069a78"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:39450e6215f7d9f6f7bc2a6da21d79374729f5d052333da4d5825af8a97e6735"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ceada5be22fa5a5a4cdeec74e761c2ee7db287208f54c718f2df4b7e200b8d4a"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3fc753451d471cff90b8f467a1fc0ae64031cf2d81b7b34e1811b7e2691bc4bc"}, + {file = "websockets-14.1-cp39-cp39-win32.whl", hash = "sha256:14839f54786987ccd9d03ed7f334baec0f02272e7ec4f6e9d427ff584aeea8b4"}, + {file = "websockets-14.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:d9fd19ecc3a4d5ae82ddbfb30962cf6d874ff943e56e0c81f5169be2fda62979"}, + {file = "websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8"}, + {file = "websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7"}, + {file = "websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d"}, + {file = "websockets-14.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddaa4a390af911da6f680be8be4ff5aaf31c4c834c1a9147bc21cbcbca2d4370"}, + {file = "websockets-14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4c805c6034206143fbabd2d259ec5e757f8b29d0a2f0bf3d2fe5d1f60147a4a"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:205f672a6c2c671a86d33f6d47c9b35781a998728d2c7c2a3e1cf3333fcb62b7"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef440054124728cc49b01c33469de06755e5a7a4e83ef61934ad95fc327fbb0"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e7591d6f440af7f73c4bd9404f3772bfee064e639d2b6cc8c94076e71b2471c1"}, + {file = "websockets-14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:25225cc79cfebc95ba1d24cd3ab86aaa35bcd315d12fa4358939bd55e9bd74a5"}, + {file = "websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e"}, + {file = "websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8"}, ] [[package]] @@ -9878,6 +9826,20 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + [[package]] name = "xlsxwriter" version = "3.2.0" @@ -10119,13 +10081,13 @@ propcache = ">=0.2.0" [[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] @@ -10211,4 +10173,4 @@ testing = ["coverage[toml]", "zope.event", 
"zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "a552f630dfdb9221eda6932e71e67a935c52ebfe4388ec9ef4b3245e7df2f82b" +content-hash = "f1ddd50f77f351937228824af032cdfcbda84b5ead0d98d4245dd52ad1bc7180" diff --git a/pyproject.toml b/pyproject.toml index f1e145fa3cd0..9f3ec7b6dbf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,9 +61,7 @@ protobuf = "^4.21.6,<5.0.0" # chromadb currently fails on 5.0+ opentelemetry-api = "1.25.0" opentelemetry-exporter-otlp-proto-grpc = "1.25.0" modal = "^0.64.145" -runloop-api-client = "0.7.0" -pygithub = "^2.5.0" -openhands-aci = "^0.1.0" +python-socketio = "^5.11.4" [tool.poetry.group.llama-index.dependencies] llama-index = "*" From 2fa8c4e14d12ad2bbd5da85801f13f185e6bb7e5 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 12:31:02 -0700 Subject: [PATCH 02/91] Initial stab at reimplementing session management --- openhands/server/listen.py | 9 ++------- openhands/server/session/manager.py | 23 ++++++++++++++++++----- openhands/server/session/session.py | 8 +++----- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index a487a6d5cac9..b7011cf2ca8e 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -921,9 +921,8 @@ async def connect(session_id: str, environ): sio.send({'error': 'Invalid token', 'error_code': 401}) return logger.info(f'Renaming existing session: {old_session_id} to {session_id}') - session = session_manager.rename_existing_session(old_session_id, session_id) + session = session_manager.alias_existing_session(old_session_id, session_id) else: - session_id = str(uuid.uuid4()) jwt_token = sign_token({'sid': session_id}, config.jwt_secret) logger.info(f'New session: {session_id}') session = session_manager.add_new_session(sio, session_id) @@ -966,8 +965,4 @@ async def oh_action(session_id, data): @sio.event def disconnect(sid): logger.info(f'SIO:DISCONNECT:{sid}') - 
# I dunno about this - should we create a new one? - #session = session_manager.close(session_id) - #if session is None: - # raise ValueError(f'no_such_session_id:{session_id}') - + #session_manager.stop_session(sid) # I dunno about this - should we do this later? diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 92b6a3d4eff1..e4dc9537864d 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -18,15 +18,12 @@ class SessionManager: file_store: FileStore sessions: dict[str, Session] = field(default_factory=dict) - def add_or_restart_session(self, sid: str, ws_conn: WebSocket, sio: socketio.AsyncServer | None, socket_id: str = None) -> Session: + def add_or_restart_session(self, sid: str, ws_conn: WebSocket) -> Session: session = Session( - sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config, sio=sio, socket_id=socket_id + sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config ) self.sessions[sid] = session return session - - def get_existing_session(self, sid: str): - return self.sessions.get(sid) async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() @@ -48,3 +45,19 @@ async def stop_session(self, sid: str) -> bool: if session: session.close() return bool(session) + + def get_existing_session(self, sio: socketio.AsyncServer | None, sid: str = None): + return self.sessions.get(sid) + + def add_new_session(self, sio: socketio.AsyncServer | None, sid: str = None): + session = Session( + sid=sid, file_store=self.file_store, config=self.config, sio=sio + ) + self.sessions[sid] = session + return session + + def alias_existing_session(self, old_sid: str, new_sid: str): + session = self.sessions.pop(old_sid) + if not session: + raise RuntimeError(f'unknown_session:{old_sid}') + self.sessions[new_sid] = session diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 
ca8156df5d99..d164976cbc67 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -31,19 +31,17 @@ class Session: sid: str websocket: WebSocket | None sio: socketio.AsyncServer | None - socket_id: str | None last_active_ts: int = 0 is_alive: bool = True agent_session: AgentSession loop: asyncio.AbstractEventLoop def __init__( - self, sid: str, ws: WebSocket | None, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None, socket_id: str | None + self, sid: str, ws: WebSocket | None, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None ): self.sid = sid self.websocket = ws self.sio = sio - self.socket_id = socket_id self.last_active_ts = int(time.time()) self.agent_session = AgentSession( sid, file_store, status_callback=self.queue_status_message @@ -189,8 +187,8 @@ async def send(self, data: dict[str, object]) -> bool: return False if self.websocket: await self.websocket.send_json(data) - if self.socket_id: - await self.sio.emit(data, to=self.socket_id) + if self.sio: + await self.sio.emit(data, to=self.sid) await asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) return True From 5973c0c2699bc717e12fb4fbda2c82fccd0e554d Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 12:55:51 -0700 Subject: [PATCH 03/91] Working through socket issues --- openhands/server/listen.py | 3 ++- openhands/server/session/manager.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index b7011cf2ca8e..0d1b1a25f5c3 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -931,7 +931,8 @@ async def connect(session_id: str, environ): if not await authenticate_github_user(github_token): raise RuntimeError(status.WS_1008_POLICY_VIOLATION) - logger.info("TODO: Session work here...") + # Read fails because not started! 
+ await session.send({'token': jwt_token, 'status': 'ok'}) latest_event_id = int(environ.get('HTTP_LATEST_EVENT_ID', -1)) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index e4dc9537864d..7a1d22644729 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -20,7 +20,7 @@ class SessionManager: def add_or_restart_session(self, sid: str, ws_conn: WebSocket) -> Session: session = Session( - sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config + sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config, sio=None ) self.sessions[sid] = session return session @@ -51,7 +51,7 @@ def get_existing_session(self, sio: socketio.AsyncServer | None, sid: str = None def add_new_session(self, sio: socketio.AsyncServer | None, sid: str = None): session = Session( - sid=sid, file_store=self.file_store, config=self.config, sio=sio + sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None ) self.sessions[sid] = session return session From 18e774dd8a1ce31d826c8ae88bbcba17b60825cb Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 13:37:07 -0700 Subject: [PATCH 04/91] Fix for merge error --- poetry.lock | 307 +++++++++++++++++++++++++++++++------------------ pyproject.toml | 3 + 2 files changed, 198 insertions(+), 112 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3bd30775b2c1..56d184e18ef0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -25,102 +25,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.10" +version = "3.11.0" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be7443669ae9c016b71f402e43208e13ddf00912f47f623ee5994e12fc7d4b3f"}, - {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:7b06b7843929e41a94ea09eb1ce3927865387e3e23ebe108e0d0d09b08d25be9"}, - {file = "aiohttp-3.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:333cf6cf8e65f6a1e06e9eb3e643a0c515bb850d470902274239fea02033e9a8"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274cfa632350225ce3fdeb318c23b4a10ec25c0e2c880eff951a3842cf358ac1"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9e5e4a85bdb56d224f412d9c98ae4cbd032cc4f3161818f692cd81766eee65a"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b606353da03edcc71130b52388d25f9a30a126e04caef1fd637e31683033abd"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab5a5a0c7a7991d90446a198689c0535be89bbd6b410a1f9a66688f0880ec026"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578a4b875af3e0daaf1ac6fa983d93e0bbfec3ead753b6d6f33d467100cdc67b"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8105fd8a890df77b76dd3054cddf01a879fc13e8af576805d667e0fa0224c35d"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3bcd391d083f636c06a68715e69467963d1f9600f85ef556ea82e9ef25f043f7"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fbc6264158392bad9df19537e872d476f7c57adf718944cc1e4495cbabf38e2a"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e48d5021a84d341bcaf95c8460b152cfbad770d28e5fe14a768988c461b821bc"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2609e9ab08474702cc67b7702dbb8a80e392c54613ebe80db7e8dbdb79837c68"}, - {file = "aiohttp-3.10.10-cp310-cp310-win32.whl", hash = "sha256:84afcdea18eda514c25bc68b9af2a2b1adea7c08899175a51fe7c4fb6d551257"}, - {file = 
"aiohttp-3.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:9c72109213eb9d3874f7ac8c0c5fa90e072d678e117d9061c06e30c85b4cf0e6"}, - {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c30a0eafc89d28e7f959281b58198a9fa5e99405f716c0289b7892ca345fe45f"}, - {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:258c5dd01afc10015866114e210fb7365f0d02d9d059c3c3415382ab633fcbcb"}, - {file = "aiohttp-3.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:15ecd889a709b0080f02721255b3f80bb261c2293d3c748151274dfea93ac871"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3935f82f6f4a3820270842e90456ebad3af15810cf65932bd24da4463bc0a4c"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:413251f6fcf552a33c981c4709a6bba37b12710982fec8e558ae944bfb2abd38"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1720b4f14c78a3089562b8875b53e36b51c97c51adc53325a69b79b4b48ebcb"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:679abe5d3858b33c2cf74faec299fda60ea9de62916e8b67e625d65bf069a3b7"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79019094f87c9fb44f8d769e41dbb664d6e8fcfd62f665ccce36762deaa0e911"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2fb38c2ed905a2582948e2de560675e9dfbee94c6d5ccdb1301c6d0a5bf092"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a3f00003de6eba42d6e94fabb4125600d6e484846dbf90ea8e48a800430cc142"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1bbb122c557a16fafc10354b9d99ebf2f2808a660d78202f10ba9d50786384b9"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:30ca7c3b94708a9d7ae76ff281b2f47d8eaf2579cd05971b5dc681db8caac6e1"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:df9270660711670e68803107d55c2b5949c2e0f2e4896da176e1ecfc068b974a"}, - {file = "aiohttp-3.10.10-cp311-cp311-win32.whl", hash = "sha256:aafc8ee9b742ce75044ae9a4d3e60e3d918d15a4c2e08a6c3c3e38fa59b92d94"}, - {file = "aiohttp-3.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:362f641f9071e5f3ee6f8e7d37d5ed0d95aae656adf4ef578313ee585b585959"}, - {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9294bbb581f92770e6ed5c19559e1e99255e4ca604a22c5c6397b2f9dd3ee42c"}, - {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8fa23fe62c436ccf23ff930149c047f060c7126eae3ccea005f0483f27b2e28"}, - {file = "aiohttp-3.10.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c6a5b8c7926ba5d8545c7dd22961a107526562da31a7a32fa2456baf040939f"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:007ec22fbc573e5eb2fb7dec4198ef8f6bf2fe4ce20020798b2eb5d0abda6138"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9627cc1a10c8c409b5822a92d57a77f383b554463d1884008e051c32ab1b3742"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50edbcad60d8f0e3eccc68da67f37268b5144ecc34d59f27a02f9611c1d4eec7"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a45d85cf20b5e0d0aa5a8dca27cce8eddef3292bc29d72dcad1641f4ed50aa16"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b00807e2605f16e1e198f33a53ce3c4523114059b0c09c337209ae55e3823a8"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f2d4324a98062be0525d16f768a03e0bbb3b9fe301ceee99611dc9a7953124e6"}, - {file = 
"aiohttp-3.10.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438cd072f75bb6612f2aca29f8bd7cdf6e35e8f160bc312e49fbecab77c99e3a"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:baa42524a82f75303f714108fea528ccacf0386af429b69fff141ffef1c534f9"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a7d8d14fe962153fc681f6366bdec33d4356f98a3e3567782aac1b6e0e40109a"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1277cd707c465cd09572a774559a3cc7c7a28802eb3a2a9472588f062097205"}, - {file = "aiohttp-3.10.10-cp312-cp312-win32.whl", hash = "sha256:59bb3c54aa420521dc4ce3cc2c3fe2ad82adf7b09403fa1f48ae45c0cbde6628"}, - {file = "aiohttp-3.10.10-cp312-cp312-win_amd64.whl", hash = "sha256:0e1b370d8007c4ae31ee6db7f9a2fe801a42b146cec80a86766e7ad5c4a259cf"}, - {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad7593bb24b2ab09e65e8a1d385606f0f47c65b5a2ae6c551db67d6653e78c28"}, - {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1eb89d3d29adaf533588f209768a9c02e44e4baf832b08118749c5fad191781d"}, - {file = "aiohttp-3.10.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3fe407bf93533a6fa82dece0e74dbcaaf5d684e5a51862887f9eaebe6372cd79"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aed5155f819873d23520919e16703fc8925e509abbb1a1491b0087d1cd969e"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f05e9727ce409358baa615dbeb9b969db94324a79b5a5cea45d39bdb01d82e6"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dffb610a30d643983aeb185ce134f97f290f8935f0abccdd32c77bed9388b42"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa6658732517ddabe22c9036479eabce6036655ba87a0224c612e1ae6af2087e"}, - {file = 
"aiohttp-3.10.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:741a46d58677d8c733175d7e5aa618d277cd9d880301a380fd296975a9cdd7bc"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e00e3505cd80440f6c98c6d69269dcc2a119f86ad0a9fd70bccc59504bebd68a"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ffe595f10566f8276b76dc3a11ae4bb7eba1aac8ddd75811736a15b0d5311414"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdfcf6443637c148c4e1a20c48c566aa694fa5e288d34b20fcdc58507882fed3"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d183cf9c797a5291e8301790ed6d053480ed94070637bfaad914dd38b0981f67"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:77abf6665ae54000b98b3c742bc6ea1d1fb31c394bcabf8b5d2c1ac3ebfe7f3b"}, - {file = "aiohttp-3.10.10-cp313-cp313-win32.whl", hash = "sha256:4470c73c12cd9109db8277287d11f9dd98f77fc54155fc71a7738a83ffcc8ea8"}, - {file = "aiohttp-3.10.10-cp313-cp313-win_amd64.whl", hash = "sha256:486f7aabfa292719a2753c016cc3a8f8172965cabb3ea2e7f7436c7f5a22a151"}, - {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1b66ccafef7336a1e1f0e389901f60c1d920102315a56df85e49552308fc0486"}, - {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:acd48d5b80ee80f9432a165c0ac8cbf9253eaddb6113269a5e18699b33958dbb"}, - {file = "aiohttp-3.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3455522392fb15ff549d92fbf4b73b559d5e43dc522588f7eb3e54c3f38beee7"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c3b868724137f713a38376fef8120c166d1eadd50da1855c112fe97954aed8"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da1dee8948d2137bb51fbb8a53cce6b1bcc86003c6b42565f008438b806cccd8"}, - {file = 
"aiohttp-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5ce2ce7c997e1971b7184ee37deb6ea9922ef5163c6ee5aa3c274b05f9e12fa"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28529e08fde6f12eba8677f5a8608500ed33c086f974de68cc65ab218713a59d"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7db54c7914cc99d901d93a34704833568d86c20925b2762f9fa779f9cd2e70f"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03a42ac7895406220124c88911ebee31ba8b2d24c98507f4a8bf826b2937c7f2"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e338c0523d024fad378b376a79faff37fafb3c001872a618cde1d322400a572"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:038f514fe39e235e9fef6717fbf944057bfa24f9b3db9ee551a7ecf584b5b480"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:64f6c17757251e2b8d885d728b6433d9d970573586a78b78ba8929b0f41d045a"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:93429602396f3383a797a2a70e5f1de5df8e35535d7806c9f91df06f297e109b"}, - {file = "aiohttp-3.10.10-cp38-cp38-win32.whl", hash = "sha256:c823bc3971c44ab93e611ab1a46b1eafeae474c0c844aff4b7474287b75fe49c"}, - {file = "aiohttp-3.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:54ca74df1be3c7ca1cf7f4c971c79c2daf48d9aa65dea1a662ae18926f5bc8ce"}, - {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01948b1d570f83ee7bbf5a60ea2375a89dfb09fd419170e7f5af029510033d24"}, - {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fc1500fd2a952c5c8e3b29aaf7e3cc6e27e9cfc0a8819b3bce48cc1b849e4cc"}, - {file = "aiohttp-3.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f614ab0c76397661b90b6851a030004dac502e48260ea10f2441abd2207fbcc7"}, - {file = 
"aiohttp-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00819de9e45d42584bed046314c40ea7e9aea95411b38971082cad449392b08c"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05646ebe6b94cc93407b3bf34b9eb26c20722384d068eb7339de802154d61bc5"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998f3bd3cfc95e9424a6acd7840cbdd39e45bc09ef87533c006f94ac47296090"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9010c31cd6fa59438da4e58a7f19e4753f7f264300cd152e7f90d4602449762"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ea7ffc6d6d6f8a11e6f40091a1040995cdff02cfc9ba4c2f30a516cb2633554"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ef9c33cc5cbca35808f6c74be11eb7f5f6b14d2311be84a15b594bd3e58b5527"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ce0cdc074d540265bfeb31336e678b4e37316849d13b308607efa527e981f5c2"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:597a079284b7ee65ee102bc3a6ea226a37d2b96d0418cc9047490f231dc09fe8"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7789050d9e5d0c309c706953e5e8876e38662d57d45f936902e176d19f1c58ab"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e7f8b04d83483577fd9200461b057c9f14ced334dcb053090cea1da9c8321a91"}, - {file = "aiohttp-3.10.10-cp39-cp39-win32.whl", hash = "sha256:c02a30b904282777d872266b87b20ed8cc0d1501855e27f831320f471d54d983"}, - {file = "aiohttp-3.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:edfe3341033a6b53a5c522c802deb2079eee5cbfbb0af032a55064bd65c73a23"}, - {file = "aiohttp-3.10.10.tar.gz", hash = "sha256:0631dd7c9f0822cc61c88586ca76d5b5ada26538097d0f1df510b082bad3411a"}, + {file = 
"aiohttp-3.11.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:024409c1b1d6076d0ed933dcebd7e4fc6f3320a227bfa0c1b6b93a8b5a146f04"}, + {file = "aiohttp-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:62502b8ffee8c6a4b5c6bf99d1de277d42bf51b2fb713975d9b63b560150b7ac"}, + {file = "aiohttp-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c54c635d1f52490cde7ef3a423645167a8284e452a35405d5c7dc1242a8e75c9"}, + {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:104ea21994b1403e4c1b398866f1187c1694fa291314ad7216ec1d8ec6b49f38"}, + {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04b24497b3baf15035730de5f207ade88a67d4483a5f16ced7ece348933a5b47"}, + {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08474e71772a516ba2e2167b4707af8361d2c452b3d8a5364c984f4867869499"}, + {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40380c96dd407dfa84eb2d264e68aa47717b53bdbe210a59cc3c35a4635f195"}, + {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1668ef2f3a7ec9881f4b6a917e5f97c87a343fa6b0d5fc826b7b0297ddd0887"}, + {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f3bf5c132eb48002bcc3825702d241d35b4e9585009e65e9dcf9c4635d0b7424"}, + {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0315978b2a4569e03fb59100f6a7e7d23f718a4521491f5c13d946d37549f3d"}, + {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d5cae4cd271e20b7ab757e966cc919186b9f02535418ab36c471a5377ef4deaa"}, + {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31b91ff3a1fcb206a1fa76e0de1f08c9ffb1dc0deb7296fa2618adfe380fc676"}, + {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:ebf610c37df4f09c71c9bbf8309b4b459107e6fe889ac0d7e16f6e4ebd975f86"}, + {file = "aiohttp-3.11.0-cp310-cp310-win32.whl", hash = "sha256:b40c304ab01e89ad0aeeecf91bbaa6ae3b00e27b796c9e8d50b71a4a7e885cc8"}, + {file = "aiohttp-3.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd0834e4260eab78671b81d34f110fbaac449563e48d419cec0030d9a8e58693"}, + {file = "aiohttp-3.11.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:89a96a0696dc67d548f69cb518c581a7a33cc1f26ab42229dea1709217c9d926"}, + {file = "aiohttp-3.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6b925c7775ab857bdc1e52e1f5abcae7d18751c09b751aeb641a5276d9b990e"}, + {file = "aiohttp-3.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7867d0808614f04e78e0a8d5a2c1f8ac6bc626a0c0e2f62be48be6b749e2f8b2"}, + {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:229ae13959a5f499d90ffbb4b9eac2255d8599315027d6f7c22fa9803a94d5b1"}, + {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62a2f5268b672087c45b33479ba1bb1d5a48c6d76c133cfce3a4f77410c200d1"}, + {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a896059b6937d1a22d8ee8377cdcd097bd26cd8c653b8f972051488b9baadee9"}, + {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:104deb7873681273c5daa13c41924693df394043a118dae90387d35bc5531788"}, + {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae36ae52b0c22fb69fb8b744eff82a20db512a29eafc6e3a4ab43b17215b219d"}, + {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7349205bb163318dcc102329d30be59a647a3d24c82c3d91ed35b7e7301ea7e"}, + {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9095580806d9ed07c0c29b23364a0b1fb78258ef9f4bddf7e55bac0e475d4edf"}, + {file = 
"aiohttp-3.11.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4d218d3eca40196384ad3b481309c56fd60e664128885d1734da0a8aa530d433"}, + {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6533dd06df3d17d1756829b68b365b1583929b54082db8f65083a4184bf68322"}, + {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72cd984f7f14e8c01b3e38f18f39ea85dba84e52ea05e37116ba5e2a72eef396"}, + {file = "aiohttp-3.11.0-cp311-cp311-win32.whl", hash = "sha256:c1828e10c3a49e2b234b87600ecb68a92b8a8dcf8b99bca9447f16c4baaa1630"}, + {file = "aiohttp-3.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:900ff74d78eb580ae4aa5883242893b123a0c442a46570902500f08d6a7e6696"}, + {file = "aiohttp-3.11.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f8f0d79b923070f25674e4ea8f3d61c9d89d24d9598d50ff32c5b9b23c79a25b"}, + {file = "aiohttp-3.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:113bf06b029143e94a47c4f36e11a8b7e396e9d1f1fc8cea58e6b7e370cfed38"}, + {file = "aiohttp-3.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3e1ed8d152cccceffb1ee7a2ac227c16372e453fb11b3aeaa56783049b85d3f6"}, + {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2e82e515e268b965424ecabebd91834a41b36260b6ef5db015ee12ddb28ef3"}, + {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c1c49bc393d854d4421ebc174a0a41f9261f50d3694d8ca277146cbbcfd24ee7"}, + {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57e17c6d71f2dc857a8a1d09be1be7802e35d90fb4ba4b06cf1aab6414a57894"}, + {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12071dd2cc95ba81e0f2737bebcb98b2a8656015e87772e84e8fb9e635b5da6e"}, + {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:97056d3422594e0787733ac4c45bef58722d452f4dc6615fee42f59fe51707dd"}, + {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2ec5efbc872b00ddd85e3904059d274f284cff314e13f48776050ca2c58f451d"}, + {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:dd505a1121ad5b666191840b7bd1d8cb917df2647deeca6f3474331b72452362"}, + {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:600b1d9f86a130131915e2f2127664311b33902c486b21a747d626f5144b4471"}, + {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8c47a0ba6c2b3d3e5715f8338d657badd21f778c6be16701922c65521c5ecfc9"}, + {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8b323b5d3aef7dd811424c269322eec58a977c0c8152e650159e47210d900504"}, + {file = "aiohttp-3.11.0-cp312-cp312-win32.whl", hash = "sha256:aabc4e92cb153636d6be54e84dad1b252ddb9aebe077942b6dcffe5e468d476a"}, + {file = "aiohttp-3.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:508cfcc99534b1282595357592d8367b44392b21f6eb5d4dc021f8d0d809e94d"}, + {file = "aiohttp-3.11.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c98a596ac20e8980cc6f34c0c92a113e98eb08f3997c150064d26d2aeb043e5a"}, + {file = "aiohttp-3.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ad14cdc0fba4df31c0f6e06c21928c5b924725cbf60d0ccc5f6e7132636250e9"}, + {file = "aiohttp-3.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:170fb2324826bb9f08055a8291f42192ae5ee2f25b2966c8f0f4537c61d73a7b"}, + {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdad66685fcf2ad14ce522cf849d4a025f4fd206d6cfc3f403d9873e4c243b03"}, + {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b95a63a8e8b5f0464bd8b1b0d59d2bec98a59b6aacc71e9be23df6989b3dfb"}, + {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e7bcfcede95531589295f56e924702cef7f9685c9e4e5407592e04ded6a65bf3"}, + {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ecc2fb1a0a9d48cf773add34196cddf7e488e48e9596e090849751bf43098f4"}, + {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fef105113d56e817cb9bcc609667ee461321413a7b972b03f5b4939f40f307c"}, + {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d33b4490026968bdc7f0729b9d87a3a6b1e09043557d2fc1c605c6072deb2f11"}, + {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6362f50a6f0e5482c4330d2151cb682779230683da0e155c15ec9fc58cb50b6a"}, + {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f698aa61879df64425191d41213dfd99efdc1627e6398e6d7aa5c312fac9702"}, + {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0e7a0762cc29cd3acd01a4d2b547b3af7956ad230ebb80b529a8e4f3e4740fe8"}, + {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b3e4fb7f5354d39490d8209aefdf5830b208d01c7293a2164e404312c3d8bc55"}, + {file = "aiohttp-3.11.0-cp313-cp313-win32.whl", hash = "sha256:6c5a6958f4366496004cf503d847093d464814543f157ef3b738bbf604232415"}, + {file = "aiohttp-3.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:3ed360d6672a9423aad39902a4e9fe305464d20ed7931dbdba30a4625782d875"}, + {file = "aiohttp-3.11.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d1ea006426edf7e1299c52a58b0443158012f7a56fed3515164b60bfcb1503a9"}, + {file = "aiohttp-3.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5e6a1f8b0268ffa1c84d7c3558724956002ba8361176e76406233e704bbcffb"}, + {file = "aiohttp-3.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:40dc9446cff326672fcbf93efdb8ef7e949824de1097624efe4f61ac7f0d2c43"}, + {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:21b4545e8d96870da9652930c5198366605ff8f982757030e2148cf341e5746b"}, + {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37f8cf3c43f292d9bb3e6760476c2b55b9663a581fad682a586a410c43a7683e"}, + {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:329f5059e0bf6983dceebac8e6ed20e75eaff6163b3414f4a4cb59e0d7037672"}, + {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ae6f182be72c3531915e90625cc65afce4df8a0fc4988bd52d8a5d5faaeb68"}, + {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d664e5f937c08adb7908ea9f391fbf2928a9b09cb412ac0aba602bde9e499e4"}, + {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:feca9fafa4385aea6759c171cd25ea82f7375312fca04178dae35331be45e538"}, + {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c415b9601ff50709d6050c8a9281733a9b042b9e589265ac40305b875cf9c463"}, + {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:91d3991fad8b65e5dbc13cd95669ea689fe0a96ff63e4e64ac24ed724e4f8103"}, + {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9231d610754724273a6ac05a1f177979490bfa6f84d49646df3928af2e88cfd5"}, + {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4e4e155968040e32c124a89852a1a5426d0e920a35f4331e1b3949037bfe93a3"}, + {file = "aiohttp-3.11.0-cp39-cp39-win32.whl", hash = "sha256:76d6ee8bb132f8ee0fcb0e205b4708ddb6fba524eb515ee168113063d825131b"}, + {file = "aiohttp-3.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:577c7429f8869fa30186fc2c9eee64d75a30b51b61f26aac9725866ae5985cfd"}, + {file = "aiohttp-3.11.0.tar.gz", hash = "sha256:f57a0de48dda792629e7952d34a0c7b81ea336bb9b721391c7c58145b237fe55"}, ] [package.dependencies] @@ -129,7 +114,8 @@ aiosignal = ">=1.1.2" attrs = ">=17.3.0" frozenlist = 
">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.12.0,<2.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] @@ -594,17 +580,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.59" +version = "1.35.60" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.59-py3-none-any.whl", hash = "sha256:8f8ff97cb9cb2e1ec7374209d0c09c1926b75604d6464c34bafaffd6d6cf0529"}, - {file = "boto3-1.35.59.tar.gz", hash = "sha256:81f4d8d6eff3e26b82cabd42eda816cfac9482821fdef353f18d2ba2f6e75f2d"}, + {file = "boto3-1.35.60-py3-none-any.whl", hash = "sha256:a34d28de1a1f6ca6ec3edd05c26db16e422293d8f9dcd94f308059a434596753"}, + {file = "boto3-1.35.60.tar.gz", hash = "sha256:e573504c67c3e438fd4b0222119ed1a73b644c78eb3b6dee0b36a6c70ecf7677"}, ] [package.dependencies] -botocore = ">=1.35.59,<1.36.0" +botocore = ">=1.35.60,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -613,13 +599,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.59" +version = "1.35.60" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.59-py3-none-any.whl", hash = "sha256:bcd66d7f55c8d1b6020eb86f2d87893fe591fb4be6a7d2a689c18be586452334"}, - {file = "botocore-1.35.59.tar.gz", hash = "sha256:de0ce655fedfc02c87869dfaa3b622488a17ff37da316ef8106cbe1573b83c98"}, + {file = "botocore-1.35.60-py3-none-any.whl", hash = "sha256:ddccfc39a0a55ac0321191a36d29c2ea9be2c96ceefb3928dd3c91c79c494d50"}, + {file = "botocore-1.35.60.tar.gz", hash = "sha256:378f53037d817bed2c04a006b7319745e664030182211429c924647273b29bc9"}, ] [package.dependencies] @@ -1562,6 +1548,17 @@ files = [ {file = "dirtyjson-1.0.8.tar.gz", hash = "sha256:90ca4a18f3ff30ce849d100dcf4a003953c79d3a2348ef056f1d9c22231a25fd"}, ] +[[package]] +name = "diskcache" +version = "5.6.3" +description = "Disk Cache -- Disk and file backed persistent cache." +optional = false +python-versions = ">=3" +files = [ + {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, + {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, +] + [[package]] name = "distlib" version = "0.3.9" @@ -1845,21 +1842,21 @@ pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "flask" -version = "3.0.3" +version = "3.1.0" description = "A simple framework for building complex web applications." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, - {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, + {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"}, + {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"}, ] [package.dependencies] -blinker = ">=1.6.2" +blinker = ">=1.9" click = ">=8.1.3" -itsdangerous = ">=2.1.2" +itsdangerous = ">=2.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=3.0.0" +Werkzeug = ">=3.1" [package.extras] async = ["asgiref (>=3.2)"] @@ -2292,13 +2289,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.152.0" +version = "2.153.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.152.0-py2.py3-none-any.whl", hash = "sha256:3e429b4584f6ccb620c1c4cdd43a6d78c5a8d307fc64860026aa26e203e8f932"}, - {file = "google_api_python_client-2.152.0.tar.gz", hash = "sha256:9593476dc548fcba794f2ea0e3cfbf927de5a7b79379781c1664637bf246f072"}, + {file = "google_api_python_client-2.153.0-py2.py3-none-any.whl", hash = "sha256:6ff13bbfa92a57972e33ec3808e18309e5981b8ca1300e5da23bf2b4d6947384"}, + {file = "google_api_python_client-2.153.0.tar.gz", hash = "sha256:35cce8647f9c163fc04fb4d811fc91aae51954a2bdd74918decbe0e65d791dd2"}, ] [package.dependencies] @@ -5644,6 +5641,28 @@ files = [ [package.dependencies] numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} +[[package]] +name = "openhands-aci" +version = "0.1.0" +description = "An Agent-Computer Interface (ACI) designed for software development agents OpenHands." 
+optional = false +python-versions = "<4.0,>=3.12" +files = [ + {file = "openhands_aci-0.1.0-py3-none-any.whl", hash = "sha256:f28e5a32e394d1e643f79bf8af27fe44d039cb71729d590f9f3ee0c23c075f00"}, + {file = "openhands_aci-0.1.0.tar.gz", hash = "sha256:babc55f516efbb27eb7e528662e14b75c902965c48a110408fda824b83ea4461"}, +] + +[package.dependencies] +diskcache = ">=5.6.3,<6.0.0" +gitpython = "*" +grep-ast = "0.3.3" +litellm = "*" +networkx = "*" +numpy = "*" +pandas = "*" +scipy = "*" +tree-sitter = "0.21.3" + [[package]] name = "opentelemetry-api" version = "1.25.0" @@ -6791,6 +6810,25 @@ files = [ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] +[[package]] +name = "pygithub" +version = "2.5.0" +description = "Use the full Github API v3" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"}, + {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"}, +] + +[package.dependencies] +Deprecated = "*" +pyjwt = {version = ">=2.4.0", extras = ["crypto"]} +pynacl = ">=1.4.0" +requests = ">=2.14.0" +typing-extensions = ">=4.0.0" +urllib3 = ">=1.26.0" + [[package]] name = "pygments" version = "2.18.0" @@ -6855,6 +6893,32 @@ files = [ [package.dependencies] pybind11 = ">=2.2" +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = 
"PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + [[package]] name = "pyparsing" version = "3.2.0" @@ -7795,6 +7859,25 @@ files = [ {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, ] +[[package]] +name = "runloop-api-client" +version = "0.7.0" +description = "The official Python library for the runloop API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "runloop_api_client-0.7.0-py3-none-any.whl", hash = "sha256:3c3744e212fedeb36a12d5164e241152f5a8c8c5b59cbade39b81ec36a7d0905"}, + {file = "runloop_api_client-0.7.0.tar.gz", hash = 
"sha256:d1c2373775f426460665a2c0d6fc150f6525ac9ccceeff534fb1c554d1de2353"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.7,<5" + [[package]] name = "s3transfer" version = "0.10.3" @@ -10173,4 +10256,4 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "f1ddd50f77f351937228824af032cdfcbda84b5ead0d98d4245dd52ad1bc7180" +content-hash = "20ef9448659a5ce69af11dc157dd3be49e7947a34bd3617a84f9a6152c72ae0e" diff --git a/pyproject.toml b/pyproject.toml index 9f3ec7b6dbf9..c5c510585569 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,6 +61,9 @@ protobuf = "^4.21.6,<5.0.0" # chromadb currently fails on 5.0+ opentelemetry-api = "1.25.0" opentelemetry-exporter-otlp-proto-grpc = "1.25.0" modal = "^0.64.145" +runloop-api-client = "0.7.0" +pygithub = "^2.5.0" +openhands-aci = "^0.1.0" python-socketio = "^5.11.4" [tool.poetry.group.llama-index.dependencies] From 92586a090dfb0304b67b9228e52deb6f2c01bfee Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 14:41:57 -0700 Subject: [PATCH 05/91] Server side init is working --- openhands/server/listen.py | 17 +++++++++-------- openhands/server/session/manager.py | 2 +- openhands/server/session/session.py | 2 +- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 72d01fd60145..5349d961241c 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -1,5 +1,6 @@ from ast import parse import asyncio +import json import os import re import tempfile @@ -940,7 +941,7 @@ async def get_response(self, path: str, scope): @sio.event async def connect(session_id: str, environ): - logger.info(f"SIO:CONNECT: {session_id}") + logger.info(f"sio:connect: {session_id}") jwt_token = environ.get("HTTP_OH_TOKEN", '') if jwt_token: @@ -959,8 +960,6 
@@ async def connect(session_id: str, environ): if not await authenticate_github_user(github_token): raise RuntimeError(status.WS_1008_POLICY_VIOLATION) - # Read fails because not started! - await session.send({'token': jwt_token, 'status': 'ok'}) latest_event_id = int(environ.get('HTTP_LATEST_EVENT_ID', -1)) @@ -983,15 +982,17 @@ async def connect(session_id: str, environ): @sio.event -async def oh_action(session_id, data): - logger.info(f"SIO:OH_ACTION:{session_id}") +async def oh_action(session_id: str, data: str): + + logger.info(f"sio:oh_action:{session_id}") session = session_manager.get_existing_session(session_id) if session is None: raise ValueError(f'no_such_session_id:{session_id}') - session.on_event(event_from_dict(data)) + await session.dispatch(json.loads(data)) + # session.on_event(event_from_dict(json.loads(data))) @sio.event def disconnect(sid): - logger.info(f'SIO:DISCONNECT:{sid}') - #session_manager.stop_session(sid) # I dunno about this - should we do this later? + logger.info(f'sio:disconnect:{sid}') + session_manager.stop_session(sid) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 7a1d22644729..7e6f10b5755e 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -46,7 +46,7 @@ async def stop_session(self, sid: str) -> bool: session.close() return bool(session) - def get_existing_session(self, sio: socketio.AsyncServer | None, sid: str = None): + def get_existing_session(self, sid: str): return self.sessions.get(sid) def add_new_session(self, sio: socketio.AsyncServer | None, sid: str = None): diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index a5f39b75ba70..957c880e5e03 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -188,7 +188,7 @@ async def send(self, data: dict[str, object]) -> bool: if self.websocket: await self.websocket.send_json(data) if self.sio: - await 
self.sio.emit(data, to=self.sid) + await self.sio.emit("oh_event", data, to=self.sid) await asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) return True From ca9aefd7b2cc7553db1aea08b451ec2eadfb6bb3 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 16:18:39 -0700 Subject: [PATCH 06/91] WIP - frontend is still janky --- frontend/package-lock.json | 80 +++++++++++++++ frontend/package.json | 3 +- frontend/src/context/ws-client-provider.tsx | 106 +++++++++++--------- 3 files changed, 140 insertions(+), 49 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 7a19eb1d65e7..314fca6df628 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -40,6 +40,7 @@ "react-textarea-autosize": "^8.5.4", "remark-gfm": "^4.0.0", "sirv-cli": "^3.0.0", + "socket.io-client": "^4.8.1", "tailwind-merge": "^2.5.4", "vite": "^5.4.9", "web-vitals": "^3.5.2", @@ -5576,6 +5577,11 @@ "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", "dev": true }, + "node_modules/@socket.io/component-emitter": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz", + "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==" + }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz", @@ -8469,6 +8475,46 @@ "once": "^1.4.0" } }, + "node_modules/engine.io-client": { + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.6.2.tgz", + "integrity": "sha512-TAr+NKeoVTjEVW8P3iHguO1LO6RlUz9O5Y8o7EY0fU+gY1NYqas7NN3slpFtbXEsLMHk0h90fJMfKjRkQ0qUIw==", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.1", + 
"engine.io-parser": "~5.2.1", + "ws": "~8.17.1", + "xmlhttprequest-ssl": "~2.1.1" + } + }, + "node_modules/engine.io-client/node_modules/ws": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/engine.io-parser": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", + "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/entities": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", @@ -22587,6 +22633,32 @@ "tslib": "^2.0.3" } }, + "node_modules/socket.io-client": { + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.8.1.tgz", + "integrity": "sha512-hJVXfu3E28NmzGk8o1sHhN3om52tRvwYeidbj7xKy2eIIse5IoKX3USlS6Tqt3BHAtflLIkCQBkzVrEEfWUyYQ==", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.2", + "engine.io-client": "~6.6.1", + "socket.io-parser": "~4.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/socket.io-parser": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.4.tgz", + "integrity": "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/source-map": { "version": "0.7.4", "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", @@ -25317,6 +25389,14 @@ "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, + "node_modules/xmlhttprequest-ssl": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.1.2.tgz", + "integrity": "sha512-TEU+nJVUUnA4CYJFLvK5X9AOeH4KvDvhIfm0vV1GaQRtchnG0hgK5p8hw/xjv8cunWYCsiPCSDzObPyhEwq3KQ==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", diff --git a/frontend/package.json b/frontend/package.json index 635daf05bda5..f37a2fc01a92 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -39,6 +39,7 @@ "react-textarea-autosize": "^8.5.4", "remark-gfm": "^4.0.0", "sirv-cli": "^3.0.0", + "socket.io-client": "^4.8.1", "tailwind-merge": "^2.5.4", "vite": "^5.4.9", "web-vitals": "^3.5.2", @@ -120,4 +121,4 @@ "public" ] } -} \ No newline at end of file +} diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 4d00fd040e48..96df0b1ac44b 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -1,5 +1,6 @@ import posthog from "posthog-js"; import React from "react"; +import { io, Socket } from "socket.io-client"; import { Settings } from "#/services/settings"; import ActionType from "#/types/ActionType"; import EventLogger from "#/utils/event-logger"; @@ -43,23 +44,25 @@ export function WsClientProvider({ settings, children, }: React.PropsWithChildren) { - const wsRef = React.useRef(null); + const sioRef = React.useRef(null); const tokenRef = React.useRef(token); const ghTokenRef = React.useRef(ghToken); - const closeRef = React.useRef | null>(null); + const disconnectRef = React.useRef | null>( + null, + ); const [status, setStatus] = 
React.useState(WsClientProviderStatus.STOPPED); const [events, setEvents] = React.useState[]>([]); const [retryCount, setRetryCount] = React.useState(RECONNECT_RETRIES); function send(event: Record) { - if (!wsRef.current) { + if (!sioRef.current) { EventLogger.error("WebSocket is not connected."); return; } - wsRef.current.send(JSON.stringify(event)); + sioRef.current.emit("oh_action", event); } - function handleOpen() { + function handleConnect() { setRetryCount(RECONNECT_RETRIES); setStatus(WsClientProviderStatus.OPENING); const initEvent = { @@ -69,10 +72,10 @@ export function WsClientProvider({ send(initEvent); } - function handleMessage(messageEvent: MessageEvent) { - const event = JSON.parse(messageEvent.data); + function handleMessage(event: Record) { setEvents((prevEvents) => [...prevEvents, event]); - if (event.extras?.agent_state === AgentState.INIT) { + const extras = event.extras as Record; + if (extras?.agent_state === AgentState.INIT) { setStatus(WsClientProviderStatus.ACTIVE); } if ( @@ -85,7 +88,7 @@ export function WsClientProvider({ handleAssistantMessage(event); } - function handleClose() { + function handleDisconnect() { if (retryCount) { setTimeout(() => { setRetryCount(retryCount - 1); @@ -94,81 +97,88 @@ export function WsClientProvider({ setStatus(WsClientProviderStatus.STOPPED); setEvents([]); } - wsRef.current = null; + sioRef.current = null; } - function handleError(event: Event) { + function handleError() { posthog.capture("socket_error"); - EventLogger.event(event, "SOCKET ERROR"); setStatus(WsClientProviderStatus.ERROR); } // Connect websocket React.useEffect(() => { - let ws = wsRef.current; + let sio = sioRef.current; - // If disabled close any existing websockets... + // If disabled disconnect any existing websockets... 
if (!enabled || !retryCount) { - if (ws) { - ws.close(); + if (sio) { + sio.disconnect(); } - wsRef.current = null; + sioRef.current = null; return () => {}; } - // If there is no websocket or the tokens have changed or the current websocket is closed, + // If there is no websocket or the tokens have changed or the current websocket is disconnected, // create a new one if ( - !ws || + !sio || (tokenRef.current && token !== tokenRef.current) || ghToken !== ghTokenRef.current || - ws.readyState === WebSocket.CLOSED || - ws.readyState === WebSocket.CLOSING + !sio.connected ) { - ws?.close(); - const baseUrl = - import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; - const protocol = window.location.protocol === "https:" ? "wss:" : "ws:"; - let wsUrl = `${protocol}//${baseUrl}/ws`; + sio?.disconnect(); + + const extraHeaders: Record = {}; + if (token) { + extraHeaders.TOKEN = token; + } + if (ghToken) { + extraHeaders.GITHUB_TOKEN = ghToken; + } if (events.length) { - wsUrl += `?latest_event_id=${events[events.length - 1].id}`; + extraHeaders.LATEST_EVENT_ID = `${events[events.length - 1].id}`; } - ws = new WebSocket(wsUrl, [ - "openhands", - token || "NO_JWT", - ghToken || "NO_GITHUB", - ]); + + const baseUrl = + import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; + sio = io(`${window.location.protocol}//${baseUrl}`, { + transports: ["websocket"], + extraHeaders, + }); } - ws.addEventListener("open", handleOpen); - ws.addEventListener("message", handleMessage); - ws.addEventListener("error", handleError); - ws.addEventListener("close", handleClose); - wsRef.current = ws; + sio.on("connect", handleConnect); + sio.on("oh_event", handleMessage); + sio.on("connect_error", handleError); + sio.on("connect_failed", handleError); + sio.on("disconnect", handleDisconnect); + + sioRef.current = sio; tokenRef.current = token; ghTokenRef.current = ghToken; return () => { - ws.removeEventListener("open", handleOpen); - ws.removeEventListener("message", 
handleMessage); - ws.removeEventListener("error", handleError); - ws.removeEventListener("close", handleClose); + sio.off("connect", handleConnect); + sio.off("oh_event", handleMessage); + sio.off("connect_error", handleError); + sio.off("connect_failed", handleError); + sio.off("disconnect", handleDisconnect); }; }, [enabled, token, ghToken, retryCount]); // Strict mode mounts and unmounts each component twice, so we have to wait in the destructor - // before actually closing the socket and cancel the operation if the component gets remounted. + // before actually disconnecting the socket and cancel the operation if the component gets remounted. React.useEffect(() => { - const timeout = closeRef.current; + const timeout = disconnectRef.current; if (timeout != null) { clearTimeout(timeout); } return () => { - closeRef.current = setTimeout(() => { - const ws = wsRef.current; - if (ws) { - ws.removeEventListener("close", handleClose); - ws.close(); + disconnectRef.current = setTimeout(() => { + const sio = sioRef.current; + if (sio) { + sio.off("disconnect", handleDisconnect); + sio.disconnect(); } }, 100); }; From d4b20c284d4e547802675b1b4f8e033fdccf0199 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 13 Nov 2024 16:38:03 -0700 Subject: [PATCH 07/91] Now using socket io --- openhands/server/listen.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 5349d961241c..dd769abe5155 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -982,13 +982,13 @@ async def connect(session_id: str, environ): @sio.event -async def oh_action(session_id: str, data: str): +async def oh_action(session_id: str, data: dict): logger.info(f"sio:oh_action:{session_id}") session = session_manager.get_existing_session(session_id) if session is None: raise ValueError(f'no_such_session_id:{session_id}') - await session.dispatch(json.loads(data)) + await session.dispatch(data) # 
session.on_event(event_from_dict(json.loads(data))) From ff7783ec81e0d0f565a37a5e8295e28b5350d158 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 14 Nov 2024 07:24:37 -0700 Subject: [PATCH 08/91] Removed retry - socketio does this anyway --- frontend/src/context/ws-client-provider.tsx | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 96df0b1ac44b..92615fc57eca 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -52,7 +52,6 @@ export function WsClientProvider({ ); const [status, setStatus] = React.useState(WsClientProviderStatus.STOPPED); const [events, setEvents] = React.useState[]>([]); - const [retryCount, setRetryCount] = React.useState(RECONNECT_RETRIES); function send(event: Record) { if (!sioRef.current) { @@ -63,7 +62,6 @@ export function WsClientProvider({ } function handleConnect() { - setRetryCount(RECONNECT_RETRIES); setStatus(WsClientProviderStatus.OPENING); const initEvent = { action: ActionType.INIT, @@ -89,14 +87,8 @@ export function WsClientProvider({ } function handleDisconnect() { - if (retryCount) { - setTimeout(() => { - setRetryCount(retryCount - 1); - }, 1000); - } else { - setStatus(WsClientProviderStatus.STOPPED); - setEvents([]); - } + setStatus(WsClientProviderStatus.STOPPED); + setEvents([]); sioRef.current = null; } @@ -110,7 +102,7 @@ export function WsClientProvider({ let sio = sioRef.current; // If disabled disconnect any existing websockets... 
- if (!enabled || !retryCount) { + if (!enabled) { if (sio) { sio.disconnect(); } @@ -163,7 +155,7 @@ export function WsClientProvider({ sio.off("connect_failed", handleError); sio.off("disconnect", handleDisconnect); }; - }, [enabled, token, ghToken, retryCount]); + }, [enabled, token, ghToken]); // Strict mode mounts and unmounts each component twice, so we have to wait in the destructor // before actually disconnecting the socket and cancel the operation if the component gets remounted. From b85fbf39fd23781119a00a0b6233964b6d48e541 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 14 Nov 2024 15:45:57 -0700 Subject: [PATCH 09/91] Tokens in init event --- frontend/src/context/ws-client-provider.tsx | 48 +++++++++++--- frontend/vite.config.ts | 1 + openhands/server/listen.py | 72 ++++++++++++++++++--- openhands/server/session/manager.py | 41 ++++++++---- openhands/server/session/session.py | 16 ++++- 5 files changed, 144 insertions(+), 34 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 92615fc57eca..0bb15bf8857e 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -63,6 +63,21 @@ export function WsClientProvider({ function handleConnect() { setStatus(WsClientProviderStatus.OPENING); + + // Send the init event that starts / reconnects a session... 
+ const args: Record = { ...settings } + if (token) { + args.token = token + } + if (ghToken) { + args.github_token = ghToken; + } + if (events.length) { + extraHeaders.latest_event_id = `${events[events.length - 1].id}`; + } + */ + + const initEvent = { action: ActionType.INIT, args: settings, @@ -83,7 +98,9 @@ export function WsClientProvider({ setStatus(WsClientProviderStatus.ERROR); } - handleAssistantMessage(event); + if (!event.token) { + handleAssistantMessage(event); + } } function handleDisconnect() { @@ -93,6 +110,7 @@ export function WsClientProvider({ } function handleError() { + console.log("TRACE:SIO:Error") posthog.capture("socket_error"); setStatus(WsClientProviderStatus.ERROR); } @@ -106,7 +124,6 @@ export function WsClientProvider({ if (sio) { sio.disconnect(); } - sioRef.current = null; return () => {}; } @@ -115,27 +132,40 @@ export function WsClientProvider({ if ( !sio || (tokenRef.current && token !== tokenRef.current) || - ghToken !== ghTokenRef.current || - !sio.connected + ghToken !== ghTokenRef.current ) { sio?.disconnect(); + /* const extraHeaders: Record = {}; if (token) { - extraHeaders.TOKEN = token; + extraHeaders.token = token; } if (ghToken) { - extraHeaders.GITHUB_TOKEN = ghToken; + extraHeaders.github_token = ghToken; } if (events.length) { - extraHeaders.LATEST_EVENT_ID = `${events[events.length - 1].id}`; + extraHeaders.latest_event_id = `${events[events.length - 1].id}`; } + */ const baseUrl = import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; - sio = io(`${window.location.protocol}//${baseUrl}`, { + sio = io(baseUrl, { transports: ["websocket"], - extraHeaders, + //extraHeaders: { + // Testy: "TESTER" + //}, + // We force a new connection, because the headers may have changed. 
+ //forceNew: true, + + // Had to do this for now because reconnection actually starts a new session, + // which we don't want - The reconnect has the same headers as the original + // which don't include the original session id + reconnection: false, + //reconnectionDelay: 1000, + //reconnectionDelayMax : 5000, + //reconnectionAttempts: 5 }); } sio.on("connect", handleConnect); diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 1e963acd7127..373d2e834afc 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -87,6 +87,7 @@ export default defineConfig(({ mode }) => { ws: true, changeOrigin: true, secure: !INSECURE_SKIP_VERIFY, + //rewriteWsOrigin: true, } }, }, diff --git a/openhands/server/listen.py b/openhands/server/listen.py index dd769abe5155..2fbe6235c2f2 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -14,6 +14,7 @@ from pathspec.patterns import GitWildMatchPattern import socketio +from openhands.core.schema.action import ActionType from openhands.events.serialization.event import event_from_dict from openhands.security.options import SecurityAnalyzers from openhands.server.data_models.feedback import FeedbackDataModel, store_feedback @@ -940,9 +941,12 @@ async def get_response(self, path: str, scope): @sio.event -async def connect(session_id: str, environ): - logger.info(f"sio:connect: {session_id}") +async def connect(connection_id: str, environ): + logger.info(f"sio:connect: {connection_id}") + # Change this protocol. + # Init should now include the session id... + """ jwt_token = environ.get("HTTP_OH_TOKEN", '') if jwt_token: old_session_id = get_sid_from_token(jwt_token, config.jwt_secret) @@ -979,20 +983,68 @@ async def connect(session_id: str, environ): ): continue await session.send(event_to_dict(event)) + """ @sio.event -async def oh_action(session_id: str, data: dict): +async def oh_action(connection_id: str, data: dict): + + # If it's an init, we do it here. 
+ action = data.get('action', '') + if action == ActionType.INIT: + await init_connection(connection_id, data) + return - logger.info(f"sio:oh_action:{session_id}") - session = session_manager.get_existing_session(session_id) - if session is None: - raise ValueError(f'no_such_session_id:{session_id}') + logger.info(f"sio:oh_action:{connection_id}") + session = session_manager.get_local_session(connection_id) await session.dispatch(data) # session.on_event(event_from_dict(json.loads(data))) + +async def init_connection(connection_id: str, data: dict): + gh_token = data.pop('gh_token', None) + if not await authenticate_github_user(gh_token): + raise RuntimeError(status.WS_1008_POLICY_VIOLATION) + + token = data.pop('token', None) + if token: + sid = get_sid_from_token(token, config.jwt_secret) + if sid == '': + sio.send({'error': 'Invalid token', 'error_code': 401}) + return + logger.info(f'Existing session: {sid}') + else: + sid = connection_id + logger.info(f'New session: {sid}') + + token = sign_token({'sid': sid}, config.jwt_secret) + await sio.emit("oh_event", {'token': token, 'status': 'ok'}, to=connection_id) + + latest_event_id = data.pop("latest_event_id", -1) + + # The session in question should exist, but may not actually be running locally... 
+ session = await session_manager.init_or_join_local_session(sio, sid, connection_id, data) + + # Send events + async_stream = AsyncEventStreamWrapper( + session.agent_session.event_stream, latest_event_id + 1 + ) + async for event in async_stream: + if isinstance( + event, + ( + NullAction, + NullObservation, + ChangeAgentStateAction, + AgentStateChangedObservation, + ), + ): + continue + await sio.emit("oh_event", data, to=connection_id) + + @sio.event -def disconnect(sid): - logger.info(f'sio:disconnect:{sid}') - session_manager.stop_session(sid) +async def disconnect(connection_id: str): + logger.info(f'sio:disconnect:{connection_id}') + session_manager.disconnect_from_local_session(connection_id) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 7e6f10b5755e..a8d00b87cd9a 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -16,13 +16,14 @@ class SessionManager: config: AppConfig file_store: FileStore - sessions: dict[str, Session] = field(default_factory=dict) + local_sessions_by_sid: dict[str, Session] = field(default_factory=dict) + local_sessions_by_connection_id: dict[str, Session] = field(default_factory=dict) + # TODO: Delete me! def add_or_restart_session(self, sid: str, ws_conn: WebSocket) -> Session: session = Session( sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config, sio=None ) - self.sessions[sid] = session return session async def attach_to_conversation(self, sid: str) -> Conversation | None: @@ -40,24 +41,36 @@ async def attach_to_conversation(self, sid: str) -> Conversation | None: async def detach_from_conversation(self, conversation: Conversation): await conversation.disconnect() + # TODO: Delete me! 
async def stop_session(self, sid: str) -> bool: session = self.sessions.pop(sid, None) if session: session.close() return bool(session) - def get_existing_session(self, sid: str): - return self.sessions.get(sid) - - def add_new_session(self, sio: socketio.AsyncServer | None, sid: str = None): - session = Session( - sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None - ) - self.sessions[sid] = session + async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, connection_id: str, data: dict): + """ If there is no local session running, initialize one """ + if sid not in self.local_sessions_by_sid: + session = Session( + sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None + ) + session.connect(connection_id) + self.local_sessions_by_sid[sid] = session + self.local_sessions_by_connection_id[connection_id] = session + await session.initialize_agent(data) + else: + session.connect(connection_id) + self.local_sessions_by_connection_id[connection_id] = session return session - def alias_existing_session(self, old_sid: str, new_sid: str): - session = self.sessions.pop(old_sid) + def get_local_session(self, connection_id: str) -> Session: + return self.local_sessions_by_connection_id[connection_id] + + def disconnect_from_local_session(self, connection_id: str): + session = self.local_sessions_by_connection_id.pop(connection_id, None) if not session: - raise RuntimeError(f'unknown_session:{old_sid}') - self.sessions[new_sid] = session + # This can occur if the init action was never run. 
+ logger.warning(f'disconnect_from_uninitialized_session:{connection_id}') + return + if session.disconnect(connection_id): + self.local_sessions_by_sid.pop(session.sid) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 957c880e5e03..0d30ead89c2e 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -31,9 +31,11 @@ class Session: sid: str websocket: WebSocket | None sio: socketio.AsyncServer | None + connection_ids: set[str] last_active_ts: int = 0 is_alive: bool = True agent_session: AgentSession + # TODO: Delete me! loop: asyncio.AbstractEventLoop def __init__( @@ -50,12 +52,24 @@ def __init__( EventStreamSubscriber.SERVER, self.on_event, self.sid ) self.config = config + self.connection_ids = set() self.loop = asyncio.get_event_loop() + def connect(self, connection_id: str): + self.connection_ids.add(connection_id) + + def disconnect(self, connection_id: str) -> bool: + self.connection_ids.remove(connection_id) + if self.connection_ids: + return False + self.close() + return True + def close(self): self.is_alive = False self.agent_session.close() + # TODO: Delete me! 
async def loop_recv(self): try: if self.websocket is None: @@ -74,7 +88,7 @@ async def loop_recv(self): logger.exception('Error in loop_recv: %s', e) self.close() - async def _initialize_agent(self, data: dict): + async def initialize_agent(self, data: dict): self.agent_session.event_stream.add_event( ChangeAgentStateAction(AgentState.LOADING), EventSource.ENVIRONMENT ) From f0487e6818a3e2c3d21a5c5396d6501bbad406d5 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 14 Nov 2024 18:37:33 -0700 Subject: [PATCH 10/91] Server side code is working - now on the client side --- openhands/server/session/manager.py | 3 ++- openhands/server/session/session.py | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index a8d00b87cd9a..4bdc06763596 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -50,7 +50,8 @@ async def stop_session(self, sid: str) -> bool: async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, connection_id: str, data: dict): """ If there is no local session running, initialize one """ - if sid not in self.local_sessions_by_sid: + session = self.local_sessions_by_sid.get(sid) + if not session: session = Session( sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None ) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 0d30ead89c2e..8535f4ec0d81 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -24,6 +24,7 @@ from openhands.llm.llm import LLM from openhands.server.session.agent_session import AgentSession from openhands.storage.files import FileStore +from openhands.utils.async_utils import wait_all from openhands.utils.shutdown_listener import should_continue @@ -202,7 +203,10 @@ async def send(self, data: dict[str, object]) -> bool: if self.websocket: await self.websocket.send_json(data) if 
self.sio: - await self.sio.emit("oh_event", data, to=self.sid) + await wait_all( + self.sio.emit("oh_event", data, to=connection_id) + for connection_id in self.connection_ids + ) await asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) return True From b2bd23ac860572135d8b0749a29353c709d6eda8 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 14 Nov 2024 18:46:13 -0700 Subject: [PATCH 11/91] Fix for test errors - client side seems to work --- frontend/src/context/ws-client-provider.tsx | 38 +++++++++------------ 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 0bb15bf8857e..7ab452cbf3dc 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -7,8 +7,6 @@ import EventLogger from "#/utils/event-logger"; import AgentState from "#/types/AgentState"; import { handleAssistantMessage } from "#/services/actions"; -const RECONNECT_RETRIES = 5; - export enum WsClientProviderStatus { STOPPED, OPENING, @@ -64,24 +62,19 @@ export function WsClientProvider({ function handleConnect() { setStatus(WsClientProviderStatus.OPENING); - // Send the init event that starts / reconnects a session... 
- const args: Record = { ...settings } + const initEvent: Record = { + action: ActionType.INIT, + args: settings, + }; if (token) { - args.token = token + initEvent.token = token; } if (ghToken) { - args.github_token = ghToken; + initEvent.github_token = ghToken; } if (events.length) { - extraHeaders.latest_event_id = `${events[events.length - 1].id}`; + initEvent.latest_event_id = `${events[events.length - 1].id}`; } - */ - - - const initEvent = { - action: ActionType.INIT, - args: settings, - }; send(initEvent); } @@ -110,9 +103,10 @@ export function WsClientProvider({ } function handleError() { - console.log("TRACE:SIO:Error") + console.log("TRACE:SIO:Error"); posthog.capture("socket_error"); setStatus(WsClientProviderStatus.ERROR); + sioRef.current?.disconnect(); } // Connect websocket @@ -153,19 +147,19 @@ export function WsClientProvider({ import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; sio = io(baseUrl, { transports: ["websocket"], - //extraHeaders: { + // extraHeaders: { // Testy: "TESTER" - //}, + // }, // We force a new connection, because the headers may have changed. 
- //forceNew: true, - + // forceNew: true, + // Had to do this for now because reconnection actually starts a new session, // which we don't want - The reconnect has the same headers as the original // which don't include the original session id reconnection: false, - //reconnectionDelay: 1000, - //reconnectionDelayMax : 5000, - //reconnectionAttempts: 5 + // reconnectionDelay: 1000, + // reconnectionDelayMax : 5000, + // reconnectionAttempts: 5 }); } sio.on("connect", handleConnect); From 767878563e6014b13ac41fd4996448ca1a5d6756 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 07:15:21 -0700 Subject: [PATCH 12/91] WIP --- frontend/src/context/ws-client-provider.tsx | 3 ++- openhands/server/listen.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 7ab452cbf3dc..e20512b29dbb 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -60,6 +60,7 @@ export function WsClientProvider({ } function handleConnect() { + console.log("TRACE:connect"); setStatus(WsClientProviderStatus.OPENING); const initEvent: Record = { @@ -156,7 +157,7 @@ export function WsClientProvider({ // Had to do this for now because reconnection actually starts a new session, // which we don't want - The reconnect has the same headers as the original // which don't include the original session id - reconnection: false, + // reconnection: false, // reconnectionDelay: 1000, // reconnectionDelayMax : 5000, // reconnectionAttempts: 5 diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 2fbe6235c2f2..3eee66edd3df 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -1011,7 +1011,7 @@ async def init_connection(connection_id: str, data: dict): if token: sid = get_sid_from_token(token, config.jwt_secret) if sid == '': - sio.send({'error': 'Invalid token', 'error_code': 401}) + 
await sio.send({'error': 'Invalid token', 'error_code': 401}) return logger.info(f'Existing session: {sid}') else: From a1b53a94987beb9fa56efcd6affbca20af439a72 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 07:39:30 -0700 Subject: [PATCH 13/91] Culled dead code --- openhands/server/listen.py | 171 +++++++++------------------- openhands/server/session/manager.py | 14 --- openhands/server/session/session.py | 26 +---- 3 files changed, 56 insertions(+), 155 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 3eee66edd3df..14b5392af0bb 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -255,122 +255,6 @@ async def attach_session(request: Request, call_next): return response -@app.websocket('/ws') -async def websocket_endpoint(websocket: WebSocket): - """WebSocket endpoint for receiving events from the client (i.e., the browser). - Once connected, the client can send various actions: - - Initialize the agent: - session management, and event streaming. - ```json - {"action": "initialize", "args": {"LLM_MODEL": "ollama/llama3", "AGENT": "CodeActAgent", "LANGUAGE": "en", "LLM_API_KEY": "ollama"}} - - Args: - ``` - websocket (WebSocket): The WebSocket connection object. 
- - Start a new development task: - ```json - {"action": "start", "args": {"task": "write a bash script that prints hello"}} - ``` - - Send a message: - ```json - {"action": "message", "args": {"content": "Hello, how are you?", "image_urls": ["base64_url1", "base64_url2"]}} - ``` - - Write contents to a file: - ```json - {"action": "write", "args": {"path": "./greetings.txt", "content": "Hello, OpenHands?"}} - ``` - - Read the contents of a file: - ```json - {"action": "read", "args": {"path": "./greetings.txt"}} - ``` - - Run a command: - ```json - {"action": "run", "args": {"command": "ls -l", "thought": "", "confirmation_state": "confirmed"}} - ``` - - Run an IPython command: - ```json - {"action": "run_ipython", "args": {"command": "print('Hello, IPython!')"}} - ``` - - Open a web page: - ```json - {"action": "browse", "args": {"url": "https://arxiv.org/html/2402.01030v2"}} - ``` - - Add a task to the root_task: - ```json - {"action": "add_task", "args": {"task": "Implement feature X"}} - ``` - - Update a task in the root_task: - ```json - {"action": "modify_task", "args": {"id": "0", "state": "in_progress", "thought": ""}} - ``` - - Change the agent's state: - ```json - {"action": "change_agent_state", "args": {"state": "paused"}} - ``` - - Finish the task: - ```json - {"action": "finish", "args": {}} - ``` - """ - # Get protocols from Sec-WebSocket-Protocol header - protocols = websocket.headers.get('sec-websocket-protocol', '').split(', ') - - # The first protocol should be our real protocol (e.g. 
'openhands') - # The second protocol should contain our auth token - if len(protocols) < 3: - logger.error('Expected 3 websocket protocols, got %d', len(protocols)) - await websocket.close(code=status.WS_1008_POLICY_VIOLATION) - return - - real_protocol = protocols[0] - jwt_token = protocols[1] if protocols[1] != 'NO_JWT' else '' - github_token = protocols[2] if protocols[2] != 'NO_GITHUB' else '' - - if not await authenticate_github_user(github_token): - await websocket.close(code=status.WS_1008_POLICY_VIOLATION) - return - - await asyncio.wait_for(websocket.accept(subprotocol=real_protocol), 10) - - if jwt_token: - sid = get_sid_from_token(jwt_token, config.jwt_secret) - - if sid == '': - await websocket.send_json({'error': 'Invalid token', 'error_code': 401}) - await websocket.close() - return - else: - sid = str(uuid.uuid4()) - jwt_token = sign_token({'sid': sid}, config.jwt_secret) - - logger.info(f'New session: {sid}') - session = session_manager.add_or_restart_session(sid, websocket) - await websocket.send_json({'token': jwt_token, 'status': 'ok'}) - - latest_event_id = -1 - if websocket.query_params.get('latest_event_id'): - latest_event_id = int(websocket.query_params.get('latest_event_id')) - - async_stream = AsyncEventStreamWrapper( - session.agent_session.event_stream, latest_event_id + 1 - ) - - async for event in async_stream: - if isinstance( - event, - ( - NullAction, - NullObservation, - ChangeAgentStateAction, - AgentStateChangedObservation, - ), - ): - continue - await websocket.send_json(event_to_dict(event)) - - await session.loop_recv() - - @app.get('/api/options/models') async def get_litellm_models() -> list[str]: """ @@ -988,6 +872,61 @@ async def connect(connection_id: str, environ): @sio.event async def oh_action(connection_id: str, data: dict): + """WebSocket endpoint for receiving events from the client (i.e., the browser). 
+ Once connected, the client can send various actions: + - Initialize the agent: + session management, and event streaming. + ```json + {"action": "initialize", "args": {"LLM_MODEL": "ollama/llama3", "AGENT": "CodeActAgent", "LANGUAGE": "en", "LLM_API_KEY": "ollama"}} + + Args: + ``` + websocket (WebSocket): The WebSocket connection object. + - Start a new development task: + ```json + {"action": "start", "args": {"task": "write a bash script that prints hello"}} + ``` + - Send a message: + ```json + {"action": "message", "args": {"content": "Hello, how are you?", "image_urls": ["base64_url1", "base64_url2"]}} + ``` + - Write contents to a file: + ```json + {"action": "write", "args": {"path": "./greetings.txt", "content": "Hello, OpenHands?"}} + ``` + - Read the contents of a file: + ```json + {"action": "read", "args": {"path": "./greetings.txt"}} + ``` + - Run a command: + ```json + {"action": "run", "args": {"command": "ls -l", "thought": "", "confirmation_state": "confirmed"}} + ``` + - Run an IPython command: + ```json + {"action": "run_ipython", "args": {"command": "print('Hello, IPython!')"}} + ``` + - Open a web page: + ```json + {"action": "browse", "args": {"url": "https://arxiv.org/html/2402.01030v2"}} + ``` + - Add a task to the root_task: + ```json + {"action": "add_task", "args": {"task": "Implement feature X"}} + ``` + - Update a task in the root_task: + ```json + {"action": "modify_task", "args": {"id": "0", "state": "in_progress", "thought": ""}} + ``` + - Change the agent's state: + ```json + {"action": "change_agent_state", "args": {"state": "paused"}} + ``` + - Finish the task: + ```json + {"action": "finish", "args": {}} + ``` + """ # If it's an init, we do it here. 
action = data.get('action', '') diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 4bdc06763596..9bd5ea11f17f 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -19,13 +19,6 @@ class SessionManager: local_sessions_by_sid: dict[str, Session] = field(default_factory=dict) local_sessions_by_connection_id: dict[str, Session] = field(default_factory=dict) - # TODO: Delete me! - def add_or_restart_session(self, sid: str, ws_conn: WebSocket) -> Session: - session = Session( - sid=sid, file_store=self.file_store, ws=ws_conn, config=self.config, sio=None - ) - return session - async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() if not await session_exists(sid, self.file_store): @@ -41,13 +34,6 @@ async def attach_to_conversation(self, sid: str) -> Conversation | None: async def detach_from_conversation(self, conversation: Conversation): await conversation.disconnect() - # TODO: Delete me! - async def stop_session(self, sid: str) -> bool: - session = self.sessions.pop(sid, None) - if session: - session.close() - return bool(session) - async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, connection_id: str, data: dict): """ If there is no local session running, initialize one """ session = self.local_sessions_by_sid.get(sid) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 8535f4ec0d81..edccc717dda9 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -36,8 +36,6 @@ class Session: last_active_ts: int = 0 is_alive: bool = True agent_session: AgentSession - # TODO: Delete me! 
- loop: asyncio.AbstractEventLoop def __init__( self, sid: str, ws: WebSocket | None, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None @@ -54,7 +52,6 @@ def __init__( ) self.config = config self.connection_ids = set() - self.loop = asyncio.get_event_loop() def connect(self, connection_id: str): self.connection_ids.add(connection_id) @@ -70,25 +67,6 @@ def close(self): self.is_alive = False self.agent_session.close() - # TODO: Delete me! - async def loop_recv(self): - try: - if self.websocket is None: - return - while should_continue(): - try: - data = await self.websocket.receive_json() - except ValueError: - await self.send_error('Invalid JSON') - continue - await self.dispatch(data) - except WebSocketDisconnect: - logger.info('WebSocket disconnected, sid: %s', self.sid) - self.close() - except RuntimeError as e: - logger.exception('Error in loop_recv: %s', e) - self.close() - async def initialize_agent(self, data: dict): self.agent_session.event_stream.add_event( ChangeAgentStateAction(AgentState.LOADING), EventSource.ENVIRONMENT @@ -229,6 +207,4 @@ async def _send_status_message(self, msg_type: str, id: str, message: str) -> bo def queue_status_message(self, msg_type: str, id: str, message: str): """Queues a status message to be sent asynchronously.""" - asyncio.run_coroutine_threadsafe( - self._send_status_message(msg_type, id, message), self.loop - ) + asyncio.create_task(self._send_status_message(msg_type, id, message)) \ No newline at end of file From 511ce8cc3a9262c05c3d720aacbf604ff28c12ed Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 08:29:51 -0700 Subject: [PATCH 14/91] Fixed closing --- openhands/server/listen.py | 45 +---------------------------- openhands/server/session/manager.py | 15 ++++++++-- openhands/server/session/session.py | 11 +++---- 3 files changed, 20 insertions(+), 51 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 14b5392af0bb..edd30b2056a2 100644 
--- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -828,47 +828,6 @@ async def get_response(self, path: str, scope): async def connect(connection_id: str, environ): logger.info(f"sio:connect: {connection_id}") - # Change this protocol. - # Init should now include the session id... - """ - jwt_token = environ.get("HTTP_OH_TOKEN", '') - if jwt_token: - old_session_id = get_sid_from_token(jwt_token, config.jwt_secret) - if old_session_id == '': - sio.send({'error': 'Invalid token', 'error_code': 401}) - return - logger.info(f'Renaming existing session: {old_session_id} to {session_id}') - session = session_manager.alias_existing_session(old_session_id, session_id) - else: - jwt_token = sign_token({'sid': session_id}, config.jwt_secret) - logger.info(f'New session: {session_id}') - session = session_manager.add_new_session(sio, session_id) - - github_token = environ.get('HTTP_GITHUB_TOKEN', '') - if not await authenticate_github_user(github_token): - raise RuntimeError(status.WS_1008_POLICY_VIOLATION) - - await session.send({'token': jwt_token, 'status': 'ok'}) - - latest_event_id = int(environ.get('HTTP_LATEST_EVENT_ID', -1)) - async_stream = AsyncEventStreamWrapper( - session.agent_session.event_stream, latest_event_id + 1 - ) - - async for event in async_stream: - if isinstance( - event, - ( - NullAction, - NullObservation, - ChangeAgentStateAction, - AgentStateChangedObservation, - ), - ): - continue - await session.send(event_to_dict(event)) - """ - @sio.event async def oh_action(connection_id: str, data: dict): @@ -937,8 +896,6 @@ async def oh_action(connection_id: str, data: dict): logger.info(f"sio:oh_action:{connection_id}") session = session_manager.get_local_session(connection_id) await session.dispatch(data) - # session.on_event(event_from_dict(json.loads(data))) - async def init_connection(connection_id: str, data: dict): @@ -986,4 +943,4 @@ async def init_connection(connection_id: str, data: dict): @sio.event async def 
disconnect(connection_id: str): logger.info(f'sio:disconnect:{connection_id}') - session_manager.disconnect_from_local_session(connection_id) + await session_manager.disconnect_from_local_session(connection_id) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 9bd5ea11f17f..03ea022bb1c2 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -1,3 +1,4 @@ +import asyncio import time from dataclasses import dataclass, field @@ -53,11 +54,21 @@ async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, def get_local_session(self, connection_id: str) -> Session: return self.local_sessions_by_connection_id[connection_id] - def disconnect_from_local_session(self, connection_id: str): + async def disconnect_from_local_session(self, connection_id: str): session = self.local_sessions_by_connection_id.pop(connection_id, None) if not session: # This can occur if the init action was never run. logger.warning(f'disconnect_from_uninitialized_session:{connection_id}') return if session.disconnect(connection_id): - self.local_sessions_by_sid.pop(session.sid) + asyncio.create_task(self._check_and_close_session(session)) + + async def _check_and_close_session(self, session: Session): + # Once there have been no connections to a session for a reasonable period, we close it + try: + await asyncio.sleep(15) + finally: + # If the sleep was cancelled, we still want to close these + if not session.connection_ids: + session.close() + self.local_sessions_by_sid.pop(session.sid) \ No newline at end of file diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index edccc717dda9..9a33caf57d44 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -36,6 +36,7 @@ class Session: last_active_ts: int = 0 is_alive: bool = True agent_session: AgentSession + loop: asyncio.AbstractEventLoop def __init__( self, sid: str, ws: 
WebSocket | None, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None @@ -52,16 +53,14 @@ def __init__( ) self.config = config self.connection_ids = set() + self.loop = asyncio.get_event_loop() def connect(self, connection_id: str): self.connection_ids.add(connection_id) def disconnect(self, connection_id: str) -> bool: self.connection_ids.remove(connection_id) - if self.connection_ids: - return False - self.close() - return True + return not self.connection_ids def close(self): self.is_alive = False @@ -207,4 +206,6 @@ async def _send_status_message(self, msg_type: str, id: str, message: str) -> bo def queue_status_message(self, msg_type: str, id: str, message: str): """Queues a status message to be sent asynchronously.""" - asyncio.create_task(self._send_status_message(msg_type, id, message)) \ No newline at end of file + asyncio.run_coroutine_threadsafe( + self._send_status_message(msg_type, id, message), self.loop + ) From 3f0e619997dc39da56edef52967c785514d206a9 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 08:52:12 -0700 Subject: [PATCH 15/91] WIP --- openhands/server/listen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index edd30b2056a2..704a0ba6c06a 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -937,7 +937,7 @@ async def init_connection(connection_id: str, data: dict): ), ): continue - await sio.emit("oh_event", data, to=connection_id) + await sio.emit("oh_event", event_to_dict(event), to=connection_id) @sio.event From 2aa6bab85f46348bf07aead5324c1d0b51f0852e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 09:27:44 -0700 Subject: [PATCH 16/91] Fix initial startup --- frontend/src/context/ws-client-provider.tsx | 15 --------------- openhands/server/listen.py | 1 - 2 files changed, 16 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx 
b/frontend/src/context/ws-client-provider.tsx index e20512b29dbb..1e0e402857b0 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -60,7 +60,6 @@ export function WsClientProvider({ } function handleConnect() { - console.log("TRACE:connect"); setStatus(WsClientProviderStatus.OPENING); const initEvent: Record = { @@ -104,7 +103,6 @@ export function WsClientProvider({ } function handleError() { - console.log("TRACE:SIO:Error"); posthog.capture("socket_error"); setStatus(WsClientProviderStatus.ERROR); sioRef.current?.disconnect(); @@ -131,19 +129,6 @@ export function WsClientProvider({ ) { sio?.disconnect(); - /* - const extraHeaders: Record = {}; - if (token) { - extraHeaders.token = token; - } - if (ghToken) { - extraHeaders.github_token = ghToken; - } - if (events.length) { - extraHeaders.latest_event_id = `${events[events.length - 1].id}`; - } - */ - const baseUrl = import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; sio = io(baseUrl, { diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 704a0ba6c06a..239767978de1 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -933,7 +933,6 @@ async def init_connection(connection_id: str, data: dict): NullAction, NullObservation, ChangeAgentStateAction, - AgentStateChangedObservation, ), ): continue From 551a5a72e1a672296073112d9fefee2bd731926b Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 09:55:39 -0700 Subject: [PATCH 17/91] Client side error fix --- frontend/src/context/ws-client-provider.tsx | 15 +++++++-------- openhands/server/listen.py | 2 +- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 1e0e402857b0..fd24fead6bda 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -4,7 +4,6 @@ import { io, Socket } from 
"socket.io-client"; import { Settings } from "#/services/settings"; import ActionType from "#/types/ActionType"; import EventLogger from "#/utils/event-logger"; -import AgentState from "#/types/AgentState"; import { handleAssistantMessage } from "#/services/actions"; export enum WsClientProviderStatus { @@ -73,6 +72,7 @@ export function WsClientProvider({ initEvent.github_token = ghToken; } if (events.length) { + // Wrong. Events is out of sync here... initEvent.latest_event_id = `${events[events.length - 1].id}`; } send(initEvent); @@ -81,9 +81,6 @@ export function WsClientProvider({ function handleMessage(event: Record) { setEvents((prevEvents) => [...prevEvents, event]); const extras = event.extras as Record; - if (extras?.agent_state === AgentState.INIT) { - setStatus(WsClientProviderStatus.ACTIVE); - } if ( status !== WsClientProviderStatus.ACTIVE && event?.observation === "error" @@ -91,15 +88,17 @@ export function WsClientProvider({ setStatus(WsClientProviderStatus.ERROR); } - if (!event.token) { + if (event.token) { + setStatus(WsClientProviderStatus.ACTIVE); + } else { handleAssistantMessage(event); } } function handleDisconnect() { setStatus(WsClientProviderStatus.STOPPED); - setEvents([]); - sioRef.current = null; + // setEvents([]); + // sioRef.current = null; } function handleError() { @@ -165,7 +164,7 @@ export function WsClientProvider({ sio.off("connect_failed", handleError); sio.off("disconnect", handleDisconnect); }; - }, [enabled, token, ghToken]); + }, [enabled, token, ghToken, events]); // Strict mode mounts and unmounts each component twice, so we have to wait in the destructor // before actually disconnecting the socket and cancel the operation if the component gets remounted. 
diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 239767978de1..c034828a985f 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -917,7 +917,7 @@ async def init_connection(connection_id: str, data: dict): token = sign_token({'sid': sid}, config.jwt_secret) await sio.emit("oh_event", {'token': token, 'status': 'ok'}, to=connection_id) - latest_event_id = data.pop("latest_event_id", -1) + latest_event_id = int(data.pop("latest_event_id", -1)) # The session in question should exist, but may not actually be running locally... session = await session_manager.init_or_join_local_session(sio, sid, connection_id, data) From 91447e0098b7a07b38d562b1e24f34ddfd4b4b46 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 10:46:56 -0700 Subject: [PATCH 18/91] Revert previous fix --- frontend/src/context/ws-client-provider.tsx | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index fd24fead6bda..db5d1580dad3 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -4,6 +4,7 @@ import { io, Socket } from "socket.io-client"; import { Settings } from "#/services/settings"; import ActionType from "#/types/ActionType"; import EventLogger from "#/utils/event-logger"; +import AgentState from "#/types/AgentState"; import { handleAssistantMessage } from "#/services/actions"; export enum WsClientProviderStatus { @@ -72,7 +73,8 @@ export function WsClientProvider({ initEvent.github_token = ghToken; } if (events.length) { - // Wrong. Events is out of sync here... + console.log("TRACE"); + // Wrong. 
Events is out of sync initEvent.latest_event_id = `${events[events.length - 1].id}`; } send(initEvent); @@ -81,6 +83,9 @@ export function WsClientProvider({ function handleMessage(event: Record) { setEvents((prevEvents) => [...prevEvents, event]); const extras = event.extras as Record; + if (extras?.agent_state === AgentState.INIT) { + setStatus(WsClientProviderStatus.ACTIVE); + } if ( status !== WsClientProviderStatus.ACTIVE && event?.observation === "error" @@ -88,9 +93,7 @@ export function WsClientProvider({ setStatus(WsClientProviderStatus.ERROR); } - if (event.token) { - setStatus(WsClientProviderStatus.ACTIVE); - } else { + if (!event.token) { handleAssistantMessage(event); } } @@ -164,7 +167,7 @@ export function WsClientProvider({ sio.off("connect_failed", handleError); sio.off("disconnect", handleDisconnect); }; - }, [enabled, token, ghToken, events]); + }, [enabled, token, ghToken]); // Strict mode mounts and unmounts each component twice, so we have to wait in the destructor // before actually disconnecting the socket and cancel the operation if the component gets remounted. From 55da1ca2d7e0810ab31c5fec1dd69108c2245b67 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 10:48:26 -0700 Subject: [PATCH 19/91] Removed comment --- frontend/src/context/ws-client-provider.tsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index db5d1580dad3..f8ce9a6dbfa3 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -73,8 +73,6 @@ export function WsClientProvider({ initEvent.github_token = ghToken; } if (events.length) { - console.log("TRACE"); - // Wrong. 
Events is out of sync initEvent.latest_event_id = `${events[events.length - 1].id}`; } send(initEvent); From c06f9ceb8d69b61c0315b8ebd96afc02fa8b630e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 11:33:44 -0700 Subject: [PATCH 20/91] Frontend fixes --- frontend/src/context/ws-client-provider.tsx | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index f8ce9a6dbfa3..7fee8161670e 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -50,6 +50,7 @@ export function WsClientProvider({ ); const [status, setStatus] = React.useState(WsClientProviderStatus.STOPPED); const [events, setEvents] = React.useState[]>([]); + const lastEventRef = React.useRef | null>(null) function send(event: Record) { if (!sioRef.current) { @@ -72,40 +73,39 @@ export function WsClientProvider({ if (ghToken) { initEvent.github_token = ghToken; } - if (events.length) { - initEvent.latest_event_id = `${events[events.length - 1].id}`; + const lastEvent = lastEventRef.current + if (lastEvent) { + initEvent.latest_event_id = lastEvent.id; } send(initEvent); } function handleMessage(event: Record) { setEvents((prevEvents) => [...prevEvents, event]); + lastEventRef.current = event; const extras = event.extras as Record; - if (extras?.agent_state === AgentState.INIT) { - setStatus(WsClientProviderStatus.ACTIVE); - } if ( status !== WsClientProviderStatus.ACTIVE && event?.observation === "error" ) { setStatus(WsClientProviderStatus.ERROR); + return } - if (!event.token) { + if (event.token) { + setStatus(WsClientProviderStatus.ACTIVE); + } else { handleAssistantMessage(event); } } function handleDisconnect() { setStatus(WsClientProviderStatus.STOPPED); - // setEvents([]); - // sioRef.current = null; } function handleError() { posthog.capture("socket_error"); setStatus(WsClientProviderStatus.ERROR); - 
sioRef.current?.disconnect(); } // Connect websocket @@ -142,7 +142,7 @@ export function WsClientProvider({ // Had to do this for now because reconnection actually starts a new session, // which we don't want - The reconnect has the same headers as the original // which don't include the original session id - // reconnection: false, + // reconnection: true, // reconnectionDelay: 1000, // reconnectionDelayMax : 5000, // reconnectionAttempts: 5 From 9cf4f00ec74fe17adb293f995a8a7e905b68a151 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 13:05:21 -0700 Subject: [PATCH 21/91] More reconnection fixes --- frontend/src/context/ws-client-provider.tsx | 12 ++++++++---- openhands/core/config/sandbox_config.py | 1 + .../runtime/impl/eventstream/eventstream_runtime.py | 5 +++-- openhands/server/listen.py | 2 -- openhands/server/session/agent_session.py | 1 + openhands/server/session/manager.py | 10 +++++++++- openhands/server/session/session.py | 6 +----- 7 files changed, 23 insertions(+), 14 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 7fee8161670e..840c3af520cd 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -61,6 +61,7 @@ export function WsClientProvider({ } function handleConnect() { + console.log("TRACE:SIO:SET_STATUS:OPENING"); setStatus(WsClientProviderStatus.OPENING); const initEvent: Record = { @@ -74,7 +75,7 @@ export function WsClientProvider({ initEvent.github_token = ghToken; } const lastEvent = lastEventRef.current - if (lastEvent) { + if (lastEvent && !isNaN(parseInt(lastEvent.id as string))) { initEvent.latest_event_id = lastEvent.id; } send(initEvent); @@ -84,6 +85,9 @@ export function WsClientProvider({ setEvents((prevEvents) => [...prevEvents, event]); lastEventRef.current = event; const extras = event.extras as Record; + if (extras?.agent_state === AgentState.INIT) { + 
setStatus(WsClientProviderStatus.ACTIVE); + } if ( status !== WsClientProviderStatus.ACTIVE && event?.observation === "error" @@ -92,9 +96,9 @@ export function WsClientProvider({ return } - if (event.token) { - setStatus(WsClientProviderStatus.ACTIVE); - } else { + if (!event.token) { + //setStatus(WsClientProviderStatus.ACTIVE); + //} else { handleAssistantMessage(event); } } diff --git a/openhands/core/config/sandbox_config.py b/openhands/core/config/sandbox_config.py index 57f4b189b182..bc6a84622ce5 100644 --- a/openhands/core/config/sandbox_config.py +++ b/openhands/core/config/sandbox_config.py @@ -37,6 +37,7 @@ class SandboxConfig: remote_runtime_api_url: str = 'http://localhost:8000' local_runtime_url: str = 'http://localhost' keep_runtime_alive: bool = True + rm_all_containers: bool = False api_key: str | None = None base_container_image: str = 'nikolaik/python-nodejs:python3.12-nodejs22' # default to nikolaik/python-nodejs:python3.12-nodejs22 for eventstream runtime runtime_container_image: str | None = None diff --git a/openhands/runtime/impl/eventstream/eventstream_runtime.py b/openhands/runtime/impl/eventstream/eventstream_runtime.py index 77cbaf338281..3cc8c52b9a47 100644 --- a/openhands/runtime/impl/eventstream/eventstream_runtime.py +++ b/openhands/runtime/impl/eventstream/eventstream_runtime.py @@ -172,6 +172,7 @@ def __init__( self.docker_client: docker.DockerClient = self._init_docker_client() self.base_container_image = self.config.sandbox.base_container_image self.runtime_container_image = self.config.sandbox.runtime_container_image + self.rm_all_containers = self.config.sandbox.rm_all_containers self.container_name = CONTAINER_NAME_PREFIX + sid self.container = None self.action_semaphore = threading.Semaphore(1) # Ensure one action at a time @@ -437,7 +438,7 @@ def _wait_until_alive(self): timeout=5, ) - def close(self, rm_all_containers: bool = True): + def close(self): """Closes the EventStreamRuntime and associated objects Parameters: @@ 
-452,7 +453,7 @@ def close(self, rm_all_containers: bool = True): if self.config.sandbox.keep_runtime_alive or self.attach_to_existing: return close_prefix = ( - CONTAINER_NAME_PREFIX if rm_all_containers else self.container_name + CONTAINER_NAME_PREFIX if self.rm_all_containers else self.container_name ) remove_all_containers(close_prefix) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index c034828a985f..c7aa77c20620 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -15,7 +15,6 @@ import socketio from openhands.core.schema.action import ActionType -from openhands.events.serialization.event import event_from_dict from openhands.security.options import SecurityAnalyzers from openhands.server.data_models.feedback import FeedbackDataModel, store_feedback from openhands.server.github import ( @@ -57,7 +56,6 @@ NullAction, ) from openhands.events.observation import ( - AgentStateChangedObservation, ErrorObservation, FileReadObservation, FileWriteObservation, diff --git a/openhands/server/session/agent_session.py b/openhands/server/session/agent_session.py index 8f9d20a5dc6e..96120b357c9d 100644 --- a/openhands/server/session/agent_session.py +++ b/openhands/server/session/agent_session.py @@ -143,6 +143,7 @@ async def _close(self): end_state.save_to_session(self.sid, self.file_store) await self.controller.close() if self.runtime is not None: + self.runtime.close() if self.security_analyzer is not None: await self.security_analyzer.close() diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 03ea022bb1c2..0a5a801030da 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -7,6 +7,10 @@ from openhands.core.config import AppConfig from openhands.core.logger import openhands_logger as logger +from openhands.core.schema.agent import AgentState +from openhands.events.event import EventSource +from openhands.events.observation.agent import 
AgentStateChangedObservation +from openhands.events.serialization.event import event_to_dict from openhands.events.stream import session_exists from openhands.server.session.conversation import Conversation from openhands.server.session.session import Session @@ -39,6 +43,9 @@ async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, """ If there is no local session running, initialize one """ session = self.local_sessions_by_sid.get(sid) if not session: + + # I think we need to rehydrate here + session = Session( sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None ) @@ -49,6 +56,7 @@ async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, else: session.connect(connection_id) self.local_sessions_by_connection_id[connection_id] = session + session.agent_session.event_stream.add_event(AgentStateChangedObservation('', AgentState.INIT), EventSource.ENVIRONMENT) return session def get_local_session(self, connection_id: str) -> Session: @@ -71,4 +79,4 @@ async def _check_and_close_session(self, session: Session): # If the sleep was cancelled, we still want to close these if not session.connection_ids: session.close() - self.local_sessions_by_sid.pop(session.sid) \ No newline at end of file + self.local_sessions_by_sid.pop(session.sid, None) \ No newline at end of file diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 9a33caf57d44..172c81177df9 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -11,7 +11,7 @@ from openhands.core.schema import AgentState from openhands.core.schema.action import ActionType from openhands.core.schema.config import ConfigType -from openhands.events.action import ChangeAgentStateAction, MessageAction, NullAction +from openhands.events.action import MessageAction, NullAction from openhands.events.event import Event, EventSource from openhands.events.observation import ( 
AgentStateChangedObservation, @@ -25,7 +25,6 @@ from openhands.server.session.agent_session import AgentSession from openhands.storage.files import FileStore from openhands.utils.async_utils import wait_all -from openhands.utils.shutdown_listener import should_continue class Session: @@ -67,9 +66,6 @@ def close(self): self.agent_session.close() async def initialize_agent(self, data: dict): - self.agent_session.event_stream.add_event( - ChangeAgentStateAction(AgentState.LOADING), EventSource.ENVIRONMENT - ) self.agent_session.event_stream.add_event( AgentStateChangedObservation('', AgentState.LOADING), EventSource.ENVIRONMENT, From c5c3f9099aa1d5db508888d2cc52be896b754f71 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 14:34:20 -0700 Subject: [PATCH 22/91] Fix for rehydrate --- openhands/server/listen.py | 11 +++++++---- openhands/server/session/manager.py | 10 ++++++---- openhands/server/session/session.py | 4 ---- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index c7aa77c20620..7ecb6ae34b39 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -921,10 +921,13 @@ async def init_connection(connection_id: str, data: dict): session = await session_manager.init_or_join_local_session(sio, sid, connection_id, data) # Send events - async_stream = AsyncEventStreamWrapper( - session.agent_session.event_stream, latest_event_id + 1 - ) - async for event in async_stream: + #async_stream = AsyncEventStreamWrapper( + # session.agent_session.event_stream, latest_event_id + 1 + #) + #async for event in async_stream: + events = list(session.agent_session.event_stream.get_events(latest_event_id + 1)) + logger.info(len(events)) + for event in events: if isinstance( event, ( diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 0a5a801030da..b3d41b603117 100644 --- a/openhands/server/session/manager.py +++ 
b/openhands/server/session/manager.py @@ -15,6 +15,7 @@ from openhands.server.session.conversation import Conversation from openhands.server.session.session import Session from openhands.storage.files import FileStore +from openhands.utils.shutdown_listener import should_continue @dataclass @@ -43,9 +44,7 @@ async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, """ If there is no local session running, initialize one """ session = self.local_sessions_by_sid.get(sid) if not session: - - # I think we need to rehydrate here - + # I think we need to rehydrate here, but it does not seem to be working session = Session( sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None ) @@ -69,7 +68,10 @@ async def disconnect_from_local_session(self, connection_id: str): logger.warning(f'disconnect_from_uninitialized_session:{connection_id}') return if session.disconnect(connection_id): - asyncio.create_task(self._check_and_close_session(session)) + if should_continue(): + asyncio.create_task(self._check_and_close_session(session)) + else: + await self._check_and_close_session(session) async def _check_and_close_session(self, session: Session): # Once there have been no connections to a session for a reasonable period, we close it diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 172c81177df9..70b2d4d2a36b 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -148,10 +148,6 @@ async def on_event(self, event: Event): await self.send(event_dict) async def dispatch(self, data: dict): - action = data.get('action', '') - if action == ActionType.INIT: - await self._initialize_agent(data) - return event = event_from_dict(data.copy()) # This checks if the model supports images if isinstance(event, MessageAction) and event.image_urls: From 5cff416fc988f0432e3e4926c30b3d42020c2bcb Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 14:35:25 -0700 
Subject: [PATCH 23/91] Fix --- openhands/server/listen.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 7ecb6ae34b39..c7aa77c20620 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -921,13 +921,10 @@ async def init_connection(connection_id: str, data: dict): session = await session_manager.init_or_join_local_session(sio, sid, connection_id, data) # Send events - #async_stream = AsyncEventStreamWrapper( - # session.agent_session.event_stream, latest_event_id + 1 - #) - #async for event in async_stream: - events = list(session.agent_session.event_stream.get_events(latest_event_id + 1)) - logger.info(len(events)) - for event in events: + async_stream = AsyncEventStreamWrapper( + session.agent_session.event_stream, latest_event_id + 1 + ) + async for event in async_stream: if isinstance( event, ( From a6d2b520a38ad3750f1fe706da012ced4df5d8ba Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 15 Nov 2024 14:46:25 -0700 Subject: [PATCH 24/91] WIP --- .../components/chat/chat-interface.test.tsx | 14 +++++++------- frontend/src/context/ws-client-provider.tsx | 15 ++++++--------- 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/frontend/__tests__/components/chat/chat-interface.test.tsx b/frontend/__tests__/components/chat/chat-interface.test.tsx index fc4c03e3f68c..71116e96515a 100644 --- a/frontend/__tests__/components/chat/chat-interface.test.tsx +++ b/frontend/__tests__/components/chat/chat-interface.test.tsx @@ -16,14 +16,14 @@ describe("Empty state", () => { send: vi.fn(), })); - const { useWsClient: useWsClientMock } = vi.hoisted(() => ({ - useWsClient: vi.fn(() => ({ send: sendMock, runtimeActive: true })), + const { useSocket: useSocketMock } = vi.hoisted(() => ({ + useSocket: vi.fn(() => ({ send: sendMock, runtimeActive: true })), })); beforeAll(() => { vi.mock("#/context/socket", async (importActual) => ({ - ...(await 
importActual()), - useWsClient: useWsClientMock, + ...(await importActual()), + useSocket: useSocketMock, })); }); @@ -77,7 +77,7 @@ describe("Empty state", () => { "should load the a user message to the input when selecting", async () => { // this is to test that the message is in the UI before the socket is called - useWsClientMock.mockImplementation(() => ({ + useSocketMock.mockImplementation(() => ({ send: sendMock, runtimeActive: false, // mock an inactive runtime setup })); @@ -106,7 +106,7 @@ describe("Empty state", () => { it.fails( "should send the message to the socket only if the runtime is active", async () => { - useWsClientMock.mockImplementation(() => ({ + useSocketMock.mockImplementation(() => ({ send: sendMock, runtimeActive: false, // mock an inactive runtime setup })); @@ -123,7 +123,7 @@ describe("Empty state", () => { await user.click(displayedSuggestions[0]); expect(sendMock).not.toHaveBeenCalled(); - useWsClientMock.mockImplementation(() => ({ + useSocketMock.mockImplementation(() => ({ send: sendMock, runtimeActive: true, // mock an active runtime setup })); diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 840c3af520cd..8e91d3197f04 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -50,7 +50,7 @@ export function WsClientProvider({ ); const [status, setStatus] = React.useState(WsClientProviderStatus.STOPPED); const [events, setEvents] = React.useState[]>([]); - const lastEventRef = React.useRef | null>(null) + const lastEventRef = React.useRef | null>(null); function send(event: Record) { if (!sioRef.current) { @@ -74,8 +74,8 @@ export function WsClientProvider({ if (ghToken) { initEvent.github_token = ghToken; } - const lastEvent = lastEventRef.current - if (lastEvent && !isNaN(parseInt(lastEvent.id as string))) { + const lastEvent = lastEventRef.current; + if (lastEvent && !Number.isNaN(parseInt(lastEvent.id as string, 
10))) { initEvent.latest_event_id = lastEvent.id; } send(initEvent); @@ -93,12 +93,12 @@ export function WsClientProvider({ event?.observation === "error" ) { setStatus(WsClientProviderStatus.ERROR); - return + return; } if (!event.token) { - //setStatus(WsClientProviderStatus.ACTIVE); - //} else { + // setStatus(WsClientProviderStatus.ACTIVE); + // } else { handleAssistantMessage(event); } } @@ -137,9 +137,6 @@ export function WsClientProvider({ import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; sio = io(baseUrl, { transports: ["websocket"], - // extraHeaders: { - // Testy: "TESTER" - // }, // We force a new connection, because the headers may have changed. // forceNew: true, From 4fac74d52423668f72330214468c83006580208b Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 09:32:18 -0700 Subject: [PATCH 25/91] Fixed frontend tests --- .../components/chat/chat-interface.test.tsx | 17 +++++++++-------- frontend/__tests__/hooks/use-terminal.test.tsx | 3 +-- frontend/src/context/ws-client-provider.tsx | 1 - frontend/src/mocks/handlers.ws.ts | 2 +- frontend/src/types/core/variances.ts | 2 +- frontend/test-utils.tsx | 3 +-- 6 files changed, 13 insertions(+), 15 deletions(-) diff --git a/frontend/__tests__/components/chat/chat-interface.test.tsx b/frontend/__tests__/components/chat/chat-interface.test.tsx index 4176762041bf..85304956cde1 100644 --- a/frontend/__tests__/components/chat/chat-interface.test.tsx +++ b/frontend/__tests__/components/chat/chat-interface.test.tsx @@ -13,12 +13,12 @@ const renderChatInterface = (messages: (Message | ErrorMessage)[]) => renderWithProviders(); describe("Empty state", () => { - const { emit: emitMock } = vi.hoisted(() => ({ - emit: vi.fn(), + const { send: sendMock } = vi.hoisted(() => ({ + send: vi.fn(), })); const { useWsClient: useWsClientMock } = vi.hoisted(() => ({ - useWsClient: vi.fn(() => ({ emit: emitMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false })), + useWsClient: 
vi.fn(() => ({ send: sendMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false })), })); beforeAll(() => { @@ -84,7 +84,7 @@ describe("Empty state", () => { async () => { // this is to test that the message is in the UI before the socket is called useWsClientMock.mockImplementation(() => ({ - emit: emitMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false + send: sendMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false })); const addUserMessageSpy = vi.spyOn(ChatSlice, "addUserMessage"); const user = userEvent.setup(); @@ -112,7 +112,7 @@ describe("Empty state", () => { "should send the message to the socket only if the runtime is active", async () => { useWsClientMock.mockImplementation(() => ({ - emit: emitMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false + send: sendMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false })); const user = userEvent.setup(); const { rerender } = renderWithProviders(, { @@ -121,19 +121,20 @@ describe("Empty state", () => { }, }); + const suggestions = screen.getByTestId("suggestions"); const displayedSuggestions = within(suggestions).getAllByRole("button"); await user.click(displayedSuggestions[0]); - expect(emitMock).not.toHaveBeenCalled(); + expect(sendMock).not.toHaveBeenCalled(); useWsClientMock.mockImplementation(() => ({ - emit: emitMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false + send: sendMock, status: WsClientProviderStatus.ACTIVE, isLoadingMessages: false })); rerender(); await waitFor(() => - expect(emitMock).toHaveBeenCalledWith(expect.any(String)), + expect(sendMock).toHaveBeenCalledWith(expect.any(String)), ); }, ); diff --git a/frontend/__tests__/hooks/use-terminal.test.tsx b/frontend/__tests__/hooks/use-terminal.test.tsx index 7a5d6e9c5460..dbe6a5b03034 100644 --- a/frontend/__tests__/hooks/use-terminal.test.tsx +++ b/frontend/__tests__/hooks/use-terminal.test.tsx @@ -3,7 +3,6 @@ import { render } from 
"@testing-library/react"; import { afterEach } from "node:test"; import { useTerminal } from "#/hooks/useTerminal"; import { Command } from "#/state/commandSlice"; -import { WsClientProvider } from "#/context/ws-client-provider"; import { ReactNode } from "react"; interface TestTerminalComponentProps { @@ -26,7 +25,7 @@ interface WrapperProps { function Wrapper({children}: WrapperProps) { return ( - {children} +
{children}
) } diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index f7760812dce7..ab700a1fed57 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -69,7 +69,6 @@ export function WsClientProvider({ } function handleConnect() { - console.log("TRACE:SIO:SET_STATUS:OPENING"); setStatus(WsClientProviderStatus.OPENING); const initEvent: Record = { diff --git a/frontend/src/mocks/handlers.ws.ts b/frontend/src/mocks/handlers.ws.ts index bf259c763989..0cacfe4ad9a0 100644 --- a/frontend/src/mocks/handlers.ws.ts +++ b/frontend/src/mocks/handlers.ws.ts @@ -55,7 +55,7 @@ export const handlers: WebSocketHandler[] = [ api.addEventListener("connection", ({ client }) => { client.send( JSON.stringify({ - status: "ok", + status: 200, token: Math.random().toString(36).substring(7), } satisfies TokenConfigSuccess), ); diff --git a/frontend/src/types/core/variances.ts b/frontend/src/types/core/variances.ts index 2a0b5d4e1d9b..1ac624b12342 100644 --- a/frontend/src/types/core/variances.ts +++ b/frontend/src/types/core/variances.ts @@ -1,7 +1,7 @@ /** Variances are types which do not conform to the current event pattern */ export interface TokenConfigSuccess { - status: "ok"; + status: "ok" | number; token: string; } diff --git a/frontend/test-utils.tsx b/frontend/test-utils.tsx index 9558019bc152..c382cda7681c 100644 --- a/frontend/test-utils.tsx +++ b/frontend/test-utils.tsx @@ -6,7 +6,6 @@ import { configureStore } from "@reduxjs/toolkit"; // eslint-disable-next-line import/no-extraneous-dependencies import { RenderOptions, render } from "@testing-library/react"; import { AppStore, RootState, rootReducer } from "./src/store"; -import { WsClientProvider } from "#/context/ws-client-provider"; const setupStore = (preloadedState?: Partial): AppStore => configureStore({ @@ -35,7 +34,7 @@ export function renderWithProviders( function Wrapper({ children }: PropsWithChildren): JSX.Element { 
return ( - {children} + {children} ); } From a97db529b12578b7aec41c7a50310bd3679348c0 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 10:34:10 -0700 Subject: [PATCH 26/91] Lint fixes --- openhands/server/listen.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 6c440a921c49..e96f433a2b98 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -1,11 +1,7 @@ -from ast import parse -import asyncio -import json import os import re import tempfile import time -import uuid import warnings import jwt @@ -848,7 +844,7 @@ async def get_response(self, path: str, scope): @sio.event async def connect(connection_id: str, environ): - logger.info(f"sio:connect: {connection_id}") + logger.info(f'sio:connect: {connection_id}') @sio.event @@ -915,7 +911,7 @@ async def oh_action(connection_id: str, data: dict): await init_connection(connection_id, data) return - logger.info(f"sio:oh_action:{connection_id}") + logger.info(f'sio:oh_action:{connection_id}') session = session_manager.get_local_session(connection_id) await session.dispatch(data) @@ -924,7 +920,7 @@ async def init_connection(connection_id: str, data: dict): gh_token = data.pop('gh_token', None) if not await authenticate_github_user(gh_token): raise RuntimeError(status.WS_1008_POLICY_VIOLATION) - + token = data.pop('token', None) if token: sid = get_sid_from_token(token, config.jwt_secret) @@ -937,12 +933,14 @@ async def init_connection(connection_id: str, data: dict): logger.info(f'New session: {sid}') token = sign_token({'sid': sid}, config.jwt_secret) - await sio.emit("oh_event", {'token': token, 'status': 'ok'}, to=connection_id) + await sio.emit('oh_event', {'token': token, 'status': 'ok'}, to=connection_id) - latest_event_id = int(data.pop("latest_event_id", -1)) + latest_event_id = int(data.pop('latest_event_id', -1)) # The session in question should exist, but may not actually 
be running locally... - session = await session_manager.init_or_join_local_session(sio, sid, connection_id, data) + session = await session_manager.init_or_join_local_session( + sio, sid, connection_id, data + ) # Send events async_stream = AsyncEventStreamWrapper( @@ -958,7 +956,7 @@ async def init_connection(connection_id: str, data: dict): ), ): continue - await sio.emit("oh_event", event_to_dict(event), to=connection_id) + await sio.emit('oh_event', event_to_dict(event), to=connection_id) @sio.event From bc7cb2542773b97378244445ff945017d71f965e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 13:55:45 -0700 Subject: [PATCH 27/91] Simple redis integration --- openhands/server/listen.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index e96f433a2b98..012eaf1d9d0f 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -838,7 +838,16 @@ async def get_response(self, path: str, scope): app.mount('/', SPAStaticFiles(directory='./frontend/build', html=True), name='dist') -sio = socketio.AsyncServer(async_mode='asgi', cors_allowed_origins='*') +client_manager = None +# This is where we add the redis manager +redis_host = os.environ.get('REDIS_HOST') +if redis_host: + client_manager = socketio.AsyncRedisManager(redis_host) +sio = socketio.AsyncServer( + async_mode='asgi', + cors_allowed_origins='*', + client_manager=client_manager +) app = socketio.ASGIApp(sio, other_asgi_app=app) From 6b2691a540f1410bf7ffcefc02c2f54f0bed841f Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 14:00:34 -0700 Subject: [PATCH 28/91] Lint fixes --- openhands/server/listen.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 012eaf1d9d0f..e5fcd1605a4b 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -844,9 +844,7 @@ async def 
get_response(self, path: str, scope): if redis_host: client_manager = socketio.AsyncRedisManager(redis_host) sio = socketio.AsyncServer( - async_mode='asgi', - cors_allowed_origins='*', - client_manager=client_manager + async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager ) app = socketio.ASGIApp(sio, other_asgi_app=app) From f5ab4bfa2eec8f52d589a9b5363f428e74a47d96 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 14:03:39 -0700 Subject: [PATCH 29/91] Lint fixes --- openhands/server/listen.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index e5fcd1605a4b..3514e2ad9341 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -34,7 +34,6 @@ HTTPException, Request, UploadFile, - WebSocket, status, ) from fastapi.responses import FileResponse, JSONResponse From 02c79c774f9e9fc483d006145f79f1ad820a3a6c Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 14:07:25 -0700 Subject: [PATCH 30/91] Lint fix --- openhands/server/listen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 3514e2ad9341..fa90e477beee 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -6,9 +6,9 @@ import jwt import requests +import socketio from pathspec import PathSpec from pathspec.patterns import GitWildMatchPattern -import socketio from openhands.core.schema.action import ActionType from openhands.runtime.impl.remote.remote_runtime import RemoteRuntime From 6698b153e4a7b89d91502fa3cbf3ab436fd4feef Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 15:10:47 -0700 Subject: [PATCH 31/91] Added redis dependency --- poetry.lock | 17 ++++++++++++++++- pyproject.toml | 3 +++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 483557bdd85d..002df29b419a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7487,6 
+7487,21 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} +[[package]] +name = "redis" +version = "5.2.0" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.8" +files = [ + {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, + {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, +] + +[package.extras] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + [[package]] name = "referencing" version = "0.35.1" @@ -10260,4 +10275,4 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "62bd678918aa213d511ee04e834f74ded805b0480047dd2dfe0555a707f19d78" +content-hash = "602c8fb3bbea0af2d9cf7477bb8f4f9d620b8d0834de2bfb2422c2d6706292a1" diff --git a/pyproject.toml b/pyproject.toml index 376c78e624e3..34e92ff9ef77 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,7 @@ runloop-api-client = "0.7.0" pygithub = "^2.5.0" openhands-aci = "^0.1.1" python-socketio = "^5.11.4" +redis = "^5.2.0" [tool.poetry.group.llama-index.dependencies] llama-index = "*" @@ -96,6 +97,7 @@ reportlab = "*" [tool.coverage.run] concurrency = ["gevent"] + [tool.poetry.group.runtime.dependencies] jupyterlab = "*" notebook = "*" @@ -126,6 +128,7 @@ ignore = ["D1"] [tool.ruff.lint.pydocstyle] convention = "google" + [tool.poetry.group.evaluation.dependencies] streamlit = "*" whatthepatch = "*" From b10b8fe7c7b1223811b5c03e2ff161d464599d2f Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 15:28:46 -0700 Subject: [PATCH 32/91] WS --- frontend/src/context/ws-client-provider.tsx | 10 ---------- openhands/server/listen.py | 1 - openhands/server/session/manager.py | 3 +-- 
openhands/server/session/session.py | 16 +++------------- 4 files changed, 4 insertions(+), 26 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index ab700a1fed57..1c85d282b48a 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -147,16 +147,6 @@ export function WsClientProvider({ import.meta.env.VITE_BACKEND_BASE_URL || window?.location.host; sio = io(baseUrl, { transports: ["websocket"], - // We force a new connection, because the headers may have changed. - // forceNew: true, - - // Had to do this for now because reconnection actually starts a new session, - // which we don't want - The reconnect has the same headers as the original - // which don't include the original session id - // reconnection: true, - // reconnectionDelay: 1000, - // reconnectionDelayMax : 5000, - // reconnectionAttempts: 5 }); } sio.on("connect", handleConnect); diff --git a/openhands/server/listen.py b/openhands/server/listen.py index fa90e477beee..a804b4103fa4 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -838,7 +838,6 @@ async def get_response(self, path: str, scope): app.mount('/', SPAStaticFiles(directory='./frontend/build', html=True), name='dist') client_manager = None -# This is where we add the redis manager redis_host = os.environ.get('REDIS_HOST') if redis_host: client_manager = socketio.AsyncRedisManager(redis_host) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index b3d41b603117..2d86d738f0c0 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -2,7 +2,6 @@ import time from dataclasses import dataclass, field -from fastapi import WebSocket import socketio from openhands.core.config import AppConfig @@ -46,7 +45,7 @@ async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, if not session: # I think we need to rehydrate 
here, but it does not seem to be working session = Session( - sid=sid, file_store=self.file_store, config=self.config, sio=sio, ws=None + sid=sid, file_store=self.file_store, config=self.config, sio=sio ) session.connect(connection_id) self.local_sessions_by_sid[sid] = session diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 99d08e2814fe..7f1765c83a4e 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -1,7 +1,6 @@ import asyncio import time -from fastapi import WebSocket, WebSocketDisconnect import socketio from openhands.controller.agent import Agent @@ -29,7 +28,6 @@ class Session: sid: str - websocket: WebSocket | None sio: socketio.AsyncServer | None connection_ids: set[str] last_active_ts: int = 0 @@ -38,10 +36,9 @@ class Session: loop: asyncio.AbstractEventLoop def __init__( - self, sid: str, ws: WebSocket | None, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None + self, sid: str, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None ): self.sid = sid - self.websocket = ws self.sio = sio self.last_active_ts = int(time.time()) self.agent_session = AgentSession( @@ -63,12 +60,7 @@ def disconnect(self, connection_id: str) -> bool: def close(self): self.is_alive = False - try: - if self.websocket is not None: - asyncio.run_coroutine_threadsafe(self.websocket.close(), self.loop) - self.websocket = None - finally: - self.agent_session.close() + self.agent_session.close() async def initialize_agent(self, data: dict): self.agent_session.event_stream.add_event( @@ -173,8 +165,6 @@ async def send(self, data: dict[str, object]) -> bool: try: if not self.is_alive: return False - if self.websocket: - await self.websocket.send_json(data) if self.sio: await wait_all( self.sio.emit("oh_event", data, to=connection_id) @@ -183,7 +173,7 @@ async def send(self, data: dict[str, object]) -> bool: await asyncio.sleep(0.001) # This flushes the data to 
the client self.last_active_ts = int(time.time()) return True - except (RuntimeError, WebSocketDisconnect): + except RuntimeError: self.is_alive = False return False From 4f5c4a3f6eca9b0b9a9df633e08c5b57d1142ed6 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 18 Nov 2024 18:07:35 -0700 Subject: [PATCH 33/91] Added ability to configure close delay --- openhands/core/config/sandbox_config.py | 1 + openhands/server/session/manager.py | 2 +- openhands/server/session/session.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/openhands/core/config/sandbox_config.py b/openhands/core/config/sandbox_config.py index bc6a84622ce5..4bbfba716bae 100644 --- a/openhands/core/config/sandbox_config.py +++ b/openhands/core/config/sandbox_config.py @@ -54,6 +54,7 @@ class SandboxConfig: runtime_startup_env_vars: dict[str, str] = field(default_factory=dict) browsergym_eval_env: str | None = None platform: str | None = None + close_delay: int = 15 def defaults_to_dict(self) -> dict: """Serialize fields to a dict for the frontend, including type hints, defaults, and whether it's optional.""" diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 2d86d738f0c0..24389f07b110 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -75,7 +75,7 @@ async def disconnect_from_local_session(self, connection_id: str): async def _check_and_close_session(self, session: Session): # Once there have been no connections to a session for a reasonable period, we close it try: - await asyncio.sleep(15) + await asyncio.sleep(self.config.sandbox.close_delay) finally: # If the sleep was cancelled, we still want to close these if not session.connection_ids: diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 7f1765c83a4e..4e87d3225ea9 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -34,6 +34,7 @@ class Session: is_alive: 
bool = True agent_session: AgentSession loop: asyncio.AbstractEventLoop + config: AppConfig def __init__( self, sid: str, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None From 5c3a9cb4b07ceb05b1bee1a28893157eba773af1 Mon Sep 17 00:00:00 2001 From: Robert Brennan Date: Tue, 19 Nov 2024 09:51:10 -0500 Subject: [PATCH 34/91] Update openhands/server/listen.py --- openhands/server/listen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index de0e5da611bf..2773a3de4476 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -833,7 +833,7 @@ async def get_response(self, path: str, scope): client_manager = None redis_host = os.environ.get('REDIS_HOST') if redis_host: - client_manager = socketio.AsyncRedisManager(redis_host) + client_manager = socketio.AsyncRedisManager(f'redis://{redis_host}') sio = socketio.AsyncServer( async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager ) From c49feb49167486a1e5b307a85ecb60cd84c6766d Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 08:04:57 -0700 Subject: [PATCH 35/91] Updated readme --- openhands/server/session/README.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 openhands/server/session/README.md diff --git a/openhands/server/session/README.md b/openhands/server/session/README.md new file mode 100644 index 000000000000..bf77989f166b --- /dev/null +++ b/openhands/server/session/README.md @@ -0,0 +1,25 @@ + +# Session Management + +Socket.io is used as the underlying protocol for client server communication. This allows the event +handlers in the code to be somewhat separate from the connection management - so brief connection +interruptions are recoverable. + +There are 3 main server side event handlers: + +* `connect` - Invoked when a new connection to the server is established. 
(This may be via http or WebSocket) + * `oh_action` - Invoked when a connected client sends an event (such as `INIT` or a prompt for the Agent) + * `disconnect` - Invoked when a connected client disconnects from the server. + + ## Init + Each connection has a unique id, and when initially established, is not associated with any session. An + `INIT` event must be sent to the server in order to attach a connection to a session. The `INIT` event + may optionally include a GitHub token and a token to connect to an existing session (which may be running + locally or may need to be hydrated). If no token is received as part of the init event, it is assumed a + new session should be started. + + ## Disconnect + The [manager](manager.py) manages connections and sessions. Each session may have zero or more connections + associated with it, managed by invocations of `INIT` and disconnect. When a session no longer has any + connections associated with it, after a set amount of time (determined by `config.sandbox.close_delay`), + the session and runtime are passivated (so they will need to be rehydrated to continue). 
From 7c4e907bda5e4d32ed82f53ac83d1af1853f393b Mon Sep 17 00:00:00 2001 From: Robert Brennan Date: Tue, 19 Nov 2024 11:20:24 -0500 Subject: [PATCH 36/91] Update openhands/server/listen.py --- openhands/server/listen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 2773a3de4476..2d9a7614a600 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -833,7 +833,7 @@ async def get_response(self, path: str, scope): client_manager = None redis_host = os.environ.get('REDIS_HOST') if redis_host: - client_manager = socketio.AsyncRedisManager(f'redis://{redis_host}') + client_manager = socketio.AsyncRedisManager(f'redis://{redis_host}', password=os.environ.get('REDIS_PASSWORD')) sio = socketio.AsyncServer( async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager ) From cb5d4b9116c3be1c319652486eb7e7c83913d153 Mon Sep 17 00:00:00 2001 From: Robert Brennan Date: Tue, 19 Nov 2024 11:21:09 -0500 Subject: [PATCH 37/91] Update openhands/server/listen.py --- openhands/server/listen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 2d9a7614a600..26f2596a7f8b 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -833,7 +833,7 @@ async def get_response(self, path: str, scope): client_manager = None redis_host = os.environ.get('REDIS_HOST') if redis_host: - client_manager = socketio.AsyncRedisManager(f'redis://{redis_host}', password=os.environ.get('REDIS_PASSWORD')) + client_manager = socketio.AsyncRedisManager(f'redis://{redis_host}', redis_options={'password': os.environ.get('REDIS_PASSWORD')}) sio = socketio.AsyncServer( async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager ) From 63b367e9d811bea42ca5d3e24e4850281cb9d81a Mon Sep 17 00:00:00 2001 From: Robert Brennan Date: Tue, 19 Nov 2024 11:34:35 -0500 Subject: [PATCH 38/91] delint --- 
openhands/server/listen.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 26f2596a7f8b..6e0040e82c13 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -833,7 +833,10 @@ async def get_response(self, path: str, scope): client_manager = None redis_host = os.environ.get('REDIS_HOST') if redis_host: - client_manager = socketio.AsyncRedisManager(f'redis://{redis_host}', redis_options={'password': os.environ.get('REDIS_PASSWORD')}) + client_manager = socketio.AsyncRedisManager( + f'redis://{redis_host}', + redis_options={'password': os.environ.get('REDIS_PASSWORD')}, + ) sio = socketio.AsyncServer( async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager ) From 5c978cd29fa4e852a158f695931035f3ce21370a Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 10:51:51 -0700 Subject: [PATCH 39/91] Fixed typo --- list.txt | 1 + openhands/server/listen.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 list.txt diff --git a/list.txt b/list.txt new file mode 100644 index 000000000000..265e23731df8 --- /dev/null +++ b/list.txt @@ -0,0 +1 @@ +tofarr diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 2773a3de4476..18fbb0806019 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -915,7 +915,7 @@ async def oh_action(connection_id: str, data: dict): async def init_connection(connection_id: str, data: dict): - gh_token = data.pop('gh_token', None) + gh_token = data.pop('github_token', None) if not await authenticate_github_user(gh_token): raise RuntimeError(status.WS_1008_POLICY_VIOLATION) From 7ce72b6abfa515cfcca7b62f68eb8ee080f3183c Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 11:20:11 -0700 Subject: [PATCH 40/91] More logging --- openhands/server/session/session.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openhands/server/session/session.py 
b/openhands/server/session/session.py index 4e87d3225ea9..6d1dece898fe 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -163,6 +163,7 @@ async def dispatch(self, data: dict): self.agent_session.event_stream.add_event(event, EventSource.USER) async def send(self, data: dict[str, object]) -> bool: + logger.info(f"TRACE:send:{data}") try: if not self.is_alive: return False From 29f1ed21e844d1b344f338c99c79008766b5d04c Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 12:22:29 -0700 Subject: [PATCH 41/91] Fix for send --- openhands/server/session/session.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 6d1dece898fe..b12cf472c412 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -163,7 +163,11 @@ async def dispatch(self, data: dict): self.agent_session.event_stream.add_event(event, EventSource.USER) async def send(self, data: dict[str, object]) -> bool: - logger.info(f"TRACE:send:{data}") + task = self.loop.create_task(self._send(data)) + await task + return task.result + + async def _send(self, data: dict[str, object]) -> bool: try: if not self.is_alive: return False @@ -176,6 +180,7 @@ async def send(self, data: dict[str, object]) -> bool: self.last_active_ts = int(time.time()) return True except RuntimeError: + logger.error("Error sending", stack_info=True, exc_info=True) self.is_alive = False return False From ec2125968c442035b4a688e5baa79461675352f1 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 12:29:12 -0700 Subject: [PATCH 42/91] Fix for rate limiter --- openhands/server/middleware.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openhands/server/middleware.py b/openhands/server/middleware.py index 872241fc865f..792b0163b60a 100644 --- a/openhands/server/middleware.py +++ b/openhands/server/middleware.py @@ -58,6 +58,7 @@ def 
__init__(self, requests: int = 2, seconds: int = 1, sleep_seconds: int = 1): self.requests = requests self.seconds = seconds self.history = defaultdict(list) + self.sleep_seconds = sleep_seconds def _clean_old_requests(self, key: str) -> None: now = datetime.now() From 90f59b1aef6df5073f7987c9f224c1bdd2961c1d Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 12:31:48 -0700 Subject: [PATCH 43/91] Lint fix --- openhands/server/session/session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index b12cf472c412..74cd4fbd1103 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -165,7 +165,7 @@ async def dispatch(self, data: dict): async def send(self, data: dict[str, object]) -> bool: task = self.loop.create_task(self._send(data)) await task - return task.result + return task.result() async def _send(self, data: dict[str, object]) -> bool: try: From 84dd0f2dd353e428924a0cf3dc285b4afdcbf25e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 19 Nov 2024 13:00:40 -0700 Subject: [PATCH 44/91] Temp bump requests --- openhands/server/listen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index ee097ecc0e61..b3988de819fc 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -87,7 +87,7 @@ app.add_middleware(NoCacheMiddleware) app.add_middleware( - RateLimitMiddleware, rate_limiter=InMemoryRateLimiter(requests=2, seconds=1) + RateLimitMiddleware, rate_limiter=InMemoryRateLimiter(requests=10, seconds=1) ) From e71deb9068ee8501e272431baff79f54e133f56c Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 20 Nov 2024 13:50:55 -0700 Subject: [PATCH 45/91] Redis integration --- list.txt | 1 - openhands/server/listen.py | 42 ++++--- openhands/server/session/agent_session.py | 8 +- openhands/server/session/manager.py | 147 
+++++++++++++++++----- openhands/server/session/session.py | 29 ++--- 5 files changed, 153 insertions(+), 74 deletions(-) delete mode 100644 list.txt diff --git a/list.txt b/list.txt deleted file mode 100644 index 265e23731df8..000000000000 --- a/list.txt +++ /dev/null @@ -1 +0,0 @@ -tofarr diff --git a/openhands/server/listen.py b/openhands/server/listen.py index b3988de819fc..ecce27e1d9d7 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -1,3 +1,4 @@ +from contextlib import asynccontextmanager import os import re import tempfile @@ -73,10 +74,25 @@ config = load_app_config() file_store = get_file_store(config.file_store, config.file_store_path) -session_manager = SessionManager(config, file_store) +client_manager = None +redis_host = os.environ.get('REDIS_HOST') +if redis_host: + client_manager = socketio.AsyncRedisManager( + f'redis://{redis_host}', + redis_options={'password': os.environ.get('REDIS_PASSWORD')}, + ) +sio = socketio.AsyncServer( + async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager +) +session_manager = SessionManager(sio, config, file_store) + +@asynccontextmanager +async def _lifespan(app: FastAPI): + async with session_manager: + yield -app = FastAPI() +app = FastAPI(lifespan=_lifespan) app.add_middleware( LocalhostCORSMiddleware, allow_credentials=True, @@ -840,16 +856,6 @@ async def get_response(self, path: str, scope): app.mount('/', SPAStaticFiles(directory='./frontend/build', html=True), name='dist') -client_manager = None -redis_host = os.environ.get('REDIS_HOST') -if redis_host: - client_manager = socketio.AsyncRedisManager( - f'redis://{redis_host}', - redis_options={'password': os.environ.get('REDIS_PASSWORD')}, - ) -sio = socketio.AsyncServer( - async_mode='asgi', cors_allowed_origins='*', client_manager=client_manager -) app = socketio.ASGIApp(sio, other_asgi_app=app) @@ -923,9 +929,7 @@ async def oh_action(connection_id: str, data: dict): return 
logger.info(f'sio:oh_action:{connection_id}') - session = session_manager.get_local_session(connection_id) - await session.dispatch(data) - + await session_manager.send_to_event_stream(connection_id, data) async def init_connection(connection_id: str, data: dict): gh_token = data.pop('github_token', None) @@ -949,13 +953,11 @@ async def init_connection(connection_id: str, data: dict): latest_event_id = int(data.pop('latest_event_id', -1)) # The session in question should exist, but may not actually be running locally... - session = await session_manager.init_or_join_local_session( - sio, sid, connection_id, data - ) + event_stream = await session_manager.init_or_join_session(sid, connection_id, data) # Send events async_stream = AsyncEventStreamWrapper( - session.agent_session.event_stream, latest_event_id + 1 + event_stream, latest_event_id + 1 ) async for event in async_stream: if isinstance( @@ -973,4 +975,4 @@ async def init_connection(connection_id: str, data: dict): @sio.event async def disconnect(connection_id: str): logger.info(f'sio:disconnect:{connection_id}') - await session_manager.disconnect_from_local_session(connection_id) + await session_manager.disconnect_from_session(connection_id) diff --git a/openhands/server/session/agent_session.py b/openhands/server/session/agent_session.py index 5b64187867bd..f0fdf247084a 100644 --- a/openhands/server/session/agent_session.py +++ b/openhands/server/session/agent_session.py @@ -14,6 +14,7 @@ from openhands.runtime.base import Runtime, RuntimeUnavailableError from openhands.security import SecurityAnalyzer, options from openhands.storage.files import FileStore +from openhands.utils.async_utils import call_async_from_sync class AgentSession: @@ -129,13 +130,8 @@ def close(self): """Closes the Agent session""" if self._closed: return - self._closed = True - - def inner_close(): - asyncio.run(self._close()) - - asyncio.get_event_loop().run_in_executor(None, inner_close) + call_async_from_sync(self._close) async 
def _close(self): if self.controller is not None: diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 24db1d5bdcda..a0b2d7008c05 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -7,23 +7,57 @@ from openhands.core.config import AppConfig from openhands.core.logger import openhands_logger as logger from openhands.core.schema.agent import AgentState -from openhands.events.event import EventSource from openhands.events.observation.agent import AgentStateChangedObservation from openhands.events.serialization.event import event_to_dict -from openhands.events.stream import session_exists +from openhands.events.stream import EventStream, session_exists from openhands.runtime.base import RuntimeUnavailableError from openhands.server.session.conversation import Conversation -from openhands.server.session.session import Session +from openhands.server.session.session import ROOM_KEY, Session from openhands.storage.files import FileStore from openhands.utils.shutdown_listener import should_continue +_CONNECTION_KEY = "oh_session:{sid}" + @dataclass class SessionManager: + sio: socketio.AsyncServer config: AppConfig file_store: FileStore local_sessions_by_sid: dict[str, Session] = field(default_factory=dict) - local_sessions_by_connection_id: dict[str, Session] = field(default_factory=dict) + local_connection_id_to_session_id: dict[str, str] = field(default_factory=dict) + _redis_listen: bool = False + + async def __aenter__(self): + redis_client = self._get_redis_client() + if redis_client: + self._redis_listen_task = asyncio.create_task(self._redis_subscribe()) + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + self._redis_listen_task.cancel() + + def _get_redis_client(self): + redis_client = getattr(self.sio.manager, "redis") + return redis_client + + async def _redis_subscribe(self): + """ + We use a redis backchannel to send actions between server nodes + """ + 
redis_client = self._get_redis_client() + pubsub = redis_client.pubsub() + await pubsub.subscribe("oh_event") + while should_continue(): + try: + message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=None) + if message: + sid = message["sid"] + session = self.local_sessions_by_sid.get(sid) + if session: + session.dispatch(message["data"]) + except asyncio.CancelledError: + return async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() @@ -44,45 +78,98 @@ async def attach_to_conversation(self, sid: str) -> Conversation | None: async def detach_from_conversation(self, conversation: Conversation): await conversation.disconnect() - async def init_or_join_local_session(self, sio: socketio.AsyncServer, sid: str, connection_id: str, data: dict): - """ If there is no local session running, initialize one """ + async def init_or_join_session(self, sid: str, connection_id: str, data: dict): + await self.sio.enter_room(connection_id, ROOM_KEY.format(sid=sid)) + self.local_connection_id_to_session_id[connection_id] = sid + + # If we have a local session running, use that session = self.local_sessions_by_sid.get(sid) - if not session: - # I think we need to rehydrate here, but it does not seem to be working - session = Session( - sid=sid, file_store=self.file_store, config=self.config, sio=sio - ) - session.connect(connection_id) - self.local_sessions_by_sid[sid] = session - self.local_sessions_by_connection_id[connection_id] = session - await session.initialize_agent(data) - else: - session.connect(connection_id) - self.local_sessions_by_connection_id[connection_id] = session - session.agent_session.event_stream.add_event(AgentStateChangedObservation('', AgentState.INIT), EventSource.ENVIRONMENT) - return session + if session: + self.sio.emit(event_to_dict(AgentStateChangedObservation('', AgentState.INIT)), to=connection_id) + return session.agent_session.event_stream + + # If there is a remote session running, 
mark a connection to that + redis_client = self._get_redis_client() + if redis_client: + num_connections = await redis_client.rpush(_CONNECTION_KEY.format(sid=sid), connection_id) + # More than one remote connection implies session is already running remotely... + if num_connections != 1: + await self.sio.emit(event_to_dict(AgentStateChangedObservation('', AgentState.INIT)), to=connection_id) + event_stream = EventStream(sid, self.file_store) + return event_stream + + # Start a new local session + session = Session( + sid=sid, file_store=self.file_store, config=self.config, sio=self.sio + ) + self.local_sessions_by_sid[sid] = session + await session.initialize_agent(data) + return session.agent_session.event_stream - def get_local_session(self, connection_id: str) -> Session: - return self.local_sessions_by_connection_id[connection_id] - async def disconnect_from_local_session(self, connection_id: str): - session = self.local_sessions_by_connection_id.pop(connection_id, None) - if not session: + async def send_to_event_stream(self, connection_id: str, data: dict): + # If there is a local session running, send to that + sid = self.local_connection_id_to_session_id[connection_id] + session = self.local_sessions_by_sid.get(sid) + if session: + await session.dispatch(data) + return + + # If there is a remote session running, send to that + redis_client = self._get_redis_client() + if redis_client: + await redis_client.publish("oh_event", { + "sid": sid, + "data": data + }) + return + + raise RuntimeError(f'no_connected_session:{sid}') + + async def disconnect_from_session(self, connection_id: str): + sid = self.local_connection_id_to_session_id.pop(connection_id, None) + if not sid: # This can occur if the init action was never run. 
logger.warning(f'disconnect_from_uninitialized_session:{connection_id}') return - if session.disconnect(connection_id): + + # Disconnect from redis if present + redis_client = self._get_redis_client() + if redis_client: + await redis_client.lrem(_CONNECTION_KEY.format(sid=sid), 0, connection_id) + + session = self.local_sessions_by_sid.get(sid) + if session: if should_continue(): - asyncio.create_task(self._check_and_close_session(session)) + asyncio.create_task(self._check_and_close_session_later(session)) else: await self._check_and_close_session(session) - async def _check_and_close_session(self, session: Session): + async def _check_and_close_session_later(self, session: Session): # Once there have been no connections to a session for a reasonable period, we close it try: await asyncio.sleep(self.config.sandbox.close_delay) finally: # If the sleep was cancelled, we still want to close these - if not session.connection_ids: + await self._check_and_close_session(session) + + async def _check_and_close_session(self, session: Session): + # Get local connections + has_connections_for_session = next(( + True for v in self.local_connection_id_to_session_id.values() + if v == session.sid + ), False) + + # If no local connections, get connections through redis + if not has_connections_for_session: + redis_client = self._get_redis_client() + if redis_client: + key = _CONNECTION_KEY.format(sid=session.sid) + has_connections_for_session = bool(await redis_client.get(key)) + if not has_connections_for_session: + await redis_client.delete(key) + + # If no connections, close session + if not has_connections_for_session: session.close() - self.local_sessions_by_sid.pop(session.sid, None) \ No newline at end of file + self.local_sessions_by_sid.pop(session.sid, None) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 74cd4fbd1103..58089ec56b2c 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ 
-23,13 +23,14 @@ from openhands.llm.llm import LLM from openhands.server.session.agent_session import AgentSession from openhands.storage.files import FileStore -from openhands.utils.async_utils import wait_all +from openhands.utils.async_utils import call_coro_in_bg_thread + +ROOM_KEY = "room:{sid}" class Session: sid: str sio: socketio.AsyncServer | None - connection_ids: set[str] last_active_ts: int = 0 is_alive: bool = True agent_session: AgentSession @@ -49,16 +50,8 @@ def __init__( EventStreamSubscriber.SERVER, self.on_event, self.sid ) self.config = config - self.connection_ids = set() self.loop = asyncio.get_event_loop() - def connect(self, connection_id: str): - self.connection_ids.add(connection_id) - - def disconnect(self, connection_id: str) -> bool: - self.connection_ids.remove(connection_id) - return not self.connection_ids - def close(self): self.is_alive = False self.agent_session.close() @@ -163,19 +156,21 @@ async def dispatch(self, data: dict): self.agent_session.event_stream.add_event(event, EventSource.USER) async def send(self, data: dict[str, object]) -> bool: - task = self.loop.create_task(self._send(data)) - await task - return task.result() + if asyncio.get_running_loop() != self.loop: + # Complete hack. Server whines about different event loops. This seems to shut it up, + # but means we don't get the result of the operation. 
I think this is okay, because + # we don't seem to care either way + self.loop.create_task(self._send(data)) + return True + return await self._send(data) async def _send(self, data: dict[str, object]) -> bool: try: if not self.is_alive: return False if self.sio: - await wait_all( - self.sio.emit("oh_event", data, to=connection_id) - for connection_id in self.connection_ids - ) + #await self.loop.create_task(self.sio.emit("oh_event", data, to=ROOM_KEY.format(sid=self.sid))) + await self.sio.emit("oh_event", data, to=ROOM_KEY.format(sid=self.sid)) await asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) return True From cc25b364806385d1eafaacc7532029a5e2c722db Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 20 Nov 2024 18:32:46 -0700 Subject: [PATCH 46/91] SIO Fixes --- openhands/server/listen.py | 1 - openhands/server/session/manager.py | 4 +--- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index ecce27e1d9d7..522b74780d3c 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -965,7 +965,6 @@ async def init_connection(connection_id: str, data: dict): ( NullAction, NullObservation, - ChangeAgentStateAction, ), ): continue diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index a0b2d7008c05..1c8205168772 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -38,7 +38,7 @@ async def __aexit__(self, exc_type, exc_value, traceback): self._redis_listen_task.cancel() def _get_redis_client(self): - redis_client = getattr(self.sio.manager, "redis") + redis_client = getattr(self.sio.manager, "redis", None) return redis_client async def _redis_subscribe(self): @@ -85,7 +85,6 @@ async def init_or_join_session(self, sid: str, connection_id: str, data: dict): # If we have a local session running, use that session = self.local_sessions_by_sid.get(sid) if 
session: - self.sio.emit(event_to_dict(AgentStateChangedObservation('', AgentState.INIT)), to=connection_id) return session.agent_session.event_stream # If there is a remote session running, mark a connection to that @@ -94,7 +93,6 @@ async def init_or_join_session(self, sid: str, connection_id: str, data: dict): num_connections = await redis_client.rpush(_CONNECTION_KEY.format(sid=sid), connection_id) # More than one remote connection implies session is already running remotely... if num_connections != 1: - await self.sio.emit(event_to_dict(AgentStateChangedObservation('', AgentState.INIT)), to=connection_id) event_stream = EventStream(sid, self.file_store) return event_stream From eb7e8c393ac6a0a784b385ee5d8fa0459919746b Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 20 Nov 2024 18:55:29 -0700 Subject: [PATCH 47/91] WIP --- openhands/server/listen.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 522b74780d3c..a44b4544ca6e 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -1,9 +1,9 @@ -from contextlib import asynccontextmanager import os import re import tempfile import time import warnings +from contextlib import asynccontextmanager import jwt import requests @@ -46,7 +46,6 @@ from openhands.core.config import LLMConfig, load_app_config from openhands.core.logger import openhands_logger as logger from openhands.events.action import ( - ChangeAgentStateAction, FileReadAction, FileWriteAction, NullAction, @@ -92,6 +91,7 @@ async def _lifespan(app: FastAPI): async with session_manager: yield + app = FastAPI(lifespan=_lifespan) app.add_middleware( LocalhostCORSMiddleware, @@ -931,6 +931,7 @@ async def oh_action(connection_id: str, data: dict): logger.info(f'sio:oh_action:{connection_id}') await session_manager.send_to_event_stream(connection_id, data) + async def init_connection(connection_id: str, data: dict): gh_token = 
data.pop('github_token', None) if not await authenticate_github_user(gh_token): @@ -956,9 +957,7 @@ async def init_connection(connection_id: str, data: dict): event_stream = await session_manager.init_or_join_session(sid, connection_id, data) # Send events - async_stream = AsyncEventStreamWrapper( - event_stream, latest_event_id + 1 - ) + async_stream = AsyncEventStreamWrapper(event_stream, latest_event_id + 1) async for event in async_stream: if isinstance( event, From 684be9858e6bab3ab223f326ebd24a0ebb643d0b Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 20 Nov 2024 18:59:51 -0700 Subject: [PATCH 48/91] WIP --- openhands/server/session/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 1c8205168772..cf8cfd846dbc 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -55,7 +55,7 @@ async def _redis_subscribe(self): sid = message["sid"] session = self.local_sessions_by_sid.get(sid) if session: - session.dispatch(message["data"]) + await session.dispatch(message["data"]) except asyncio.CancelledError: return From 373279fc481f26202768b16986f6659ede1345ee Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 20 Nov 2024 19:56:10 -0700 Subject: [PATCH 49/91] WIP --- openhands/server/session/manager.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index cf8cfd846dbc..ee7ffd2d99b9 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -1,4 +1,5 @@ import asyncio +import json import time from dataclasses import dataclass, field @@ -26,7 +27,7 @@ class SessionManager: file_store: FileStore local_sessions_by_sid: dict[str, Session] = field(default_factory=dict) local_connection_id_to_session_id: dict[str, str] = field(default_factory=dict) - _redis_listen: bool 
= False + _redis_listen_task: asyncio.Task | None = None async def __aenter__(self): redis_client = self._get_redis_client() @@ -35,7 +36,9 @@ async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_value, traceback): - self._redis_listen_task.cancel() + if self._redis_listen_task: + self._redis_listen_task.cancel() + self._redis_listen_task = None def _get_redis_client(self): redis_client = getattr(self.sio.manager, "redis", None) @@ -48,16 +51,17 @@ async def _redis_subscribe(self): redis_client = self._get_redis_client() pubsub = redis_client.pubsub() await pubsub.subscribe("oh_event") - while should_continue(): - try: - message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=None) + try: + while should_continue(): + message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=5) if message: - sid = message["sid"] + data = json.loads(message['data']) + sid = data["sid"] session = self.local_sessions_by_sid.get(sid) if session: - await session.dispatch(message["data"]) - except asyncio.CancelledError: - return + await session.dispatch(data) + except asyncio.CancelledError: + return async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() @@ -116,10 +120,10 @@ async def send_to_event_stream(self, connection_id: str, data: dict): # If there is a remote session running, send to that redis_client = self._get_redis_client() if redis_client: - await redis_client.publish("oh_event", { + await redis_client.publish("oh_event", json.dumps({ "sid": sid, "data": data - }) + })) return raise RuntimeError(f'no_connected_session:{sid}') From 6ba79493d949f81d95151f36614a463ef0b8350f Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 20 Nov 2024 20:21:56 -0700 Subject: [PATCH 50/91] Fix for redis error handling --- openhands/server/session/manager.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/openhands/server/session/manager.py 
b/openhands/server/session/manager.py index ee7ffd2d99b9..3c005a7a75c0 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -7,9 +7,6 @@ from openhands.core.config import AppConfig from openhands.core.logger import openhands_logger as logger -from openhands.core.schema.agent import AgentState -from openhands.events.observation.agent import AgentStateChangedObservation -from openhands.events.serialization.event import event_to_dict from openhands.events.stream import EventStream, session_exists from openhands.runtime.base import RuntimeUnavailableError from openhands.server.session.conversation import Conversation @@ -51,17 +48,19 @@ async def _redis_subscribe(self): redis_client = self._get_redis_client() pubsub = redis_client.pubsub() await pubsub.subscribe("oh_event") - try: - while should_continue(): + while should_continue(): + try: message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=5) if message: data = json.loads(message['data']) sid = data["sid"] session = self.local_sessions_by_sid.get(sid) if session: - await session.dispatch(data) - except asyncio.CancelledError: - return + await session.dispatch(data["data"]) + except asyncio.CancelledError: + return + except: + logger.warning("error_reading_from_redis", exc_info=True, stack_info=True) async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() From fc400fe8162eb1944067d532453b6c577173bbe8 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 09:25:15 -0700 Subject: [PATCH 51/91] Fix for error on close --- openhands/server/session/manager.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 3c005a7a75c0..c80409d83012 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -166,8 +166,9 @@ async def _check_and_close_session(self, session: Session): 
redis_client = self._get_redis_client() if redis_client: key = _CONNECTION_KEY.format(sid=session.sid) - has_connections_for_session = bool(await redis_client.get(key)) - if not has_connections_for_session: + connections_for_session = await redis_client.lrange(key, 0, -1) + testy = await redis_client.get(key+"no_exist") + if not connections_for_session: await redis_client.delete(key) # If no connections, close session From 949f825e08881212bae155194d6325945af3c7ae Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 09:25:53 -0700 Subject: [PATCH 52/91] Fix for error on close --- openhands/server/session/manager.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index c80409d83012..d839c981347c 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -167,7 +167,6 @@ async def _check_and_close_session(self, session: Session): if redis_client: key = _CONNECTION_KEY.format(sid=session.sid) connections_for_session = await redis_client.lrange(key, 0, -1) - testy = await redis_client.get(key+"no_exist") if not connections_for_session: await redis_client.delete(key) From 03dba8b716dabe98a94324e1996c4538d0d1b3a5 Mon Sep 17 00:00:00 2001 From: tofarr Date: Thu, 21 Nov 2024 09:34:43 -0700 Subject: [PATCH 53/91] Update frontend/src/context/ws-client-provider.tsx Co-authored-by: Robert Brennan --- frontend/src/context/ws-client-provider.tsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/src/context/ws-client-provider.tsx b/frontend/src/context/ws-client-provider.tsx index 1c85d282b48a..04b0b7b65eee 100644 --- a/frontend/src/context/ws-client-provider.tsx +++ b/frontend/src/context/ws-client-provider.tsx @@ -107,8 +107,6 @@ export function WsClientProvider({ } if (!event.token) { - // setStatus(WsClientProviderStatus.ACTIVE); - // } else { handleAssistantMessage(event); } } From 824b7ec0daaeda44901202dc81fb7d089261946b Mon Sep 17 00:00:00 
2001 From: tofarr Date: Thu, 21 Nov 2024 09:35:50 -0700 Subject: [PATCH 54/91] Update openhands/server/session/session.py Co-authored-by: Robert Brennan --- openhands/server/session/session.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 58089ec56b2c..6e296b096b9a 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -169,7 +169,6 @@ async def _send(self, data: dict[str, object]) -> bool: if not self.is_alive: return False if self.sio: - #await self.loop.create_task(self.sio.emit("oh_event", data, to=ROOM_KEY.format(sid=self.sid))) await self.sio.emit("oh_event", data, to=ROOM_KEY.format(sid=self.sid)) await asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) From 3615a99a8141c481d36790d90ab0232cf45fa859 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 09:48:21 -0700 Subject: [PATCH 55/91] Readme update --- openhands/server/session/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openhands/server/session/README.md b/openhands/server/session/README.md index bf77989f166b..b367ba586495 100644 --- a/openhands/server/session/README.md +++ b/openhands/server/session/README.md @@ -8,7 +8,8 @@ interruptions are recoverable. There are 3 main server side event handlers: * `connect` - Invoked when a new connection to the server is established. (This may be via http or WebSocket) -* `oh_action` - Invoked when a connected client sends an event (Such as `INIT` or a prompt for the Agent) +* `oh_action` - Invoked when a connected client sends an event (Such as `INIT` or a prompt for the Agent) - + this is distinct from the `oh_event` sent from the server to the client. * `disconnect` - Invoked when a connected client disconnects from the server. 
## Init From 8027f81454d26ecaf530293b3e64a15c54858192 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 10:24:39 -0700 Subject: [PATCH 56/91] Fix for error if local connection not found --- openhands/server/session/manager.py | 11 ++++++----- openhands/server/session/session.py | 17 +++++++---------- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index d839c981347c..0debb19b0caf 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -110,11 +110,12 @@ async def init_or_join_session(self, sid: str, connection_id: str, data: dict): async def send_to_event_stream(self, connection_id: str, data: dict): # If there is a local session running, send to that - sid = self.local_connection_id_to_session_id[connection_id] - session = self.local_sessions_by_sid.get(sid) - if session: - await session.dispatch(data) - return + sid = self.local_connection_id_to_session_id.get(connection_id) + if sid: + session = self.local_sessions_by_sid.get(sid) + if session: + await session.dispatch(data) + return # If there is a remote session running, send to that redis_client = self._get_redis_client() diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 6e296b096b9a..38307b186bbe 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -155,14 +155,11 @@ async def dispatch(self, data: dict): return self.agent_session.event_stream.add_event(event, EventSource.USER) - async def send(self, data: dict[str, object]) -> bool: + async def send(self, data: dict[str, object]): if asyncio.get_running_loop() != self.loop: - # Complete hack. Server whines about different event loops. This seems to shut it up, - # but means we don't get the result of the operation. 
I think this is okay, because - # we don't seem to care either way self.loop.create_task(self._send(data)) - return True - return await self._send(data) + return + await self._send(data) async def _send(self, data: dict[str, object]) -> bool: try: @@ -178,16 +175,16 @@ async def _send(self, data: dict[str, object]) -> bool: self.is_alive = False return False - async def send_error(self, message: str) -> bool: + async def send_error(self, message: str): """Sends an error message to the client.""" - return await self.send({'error': True, 'message': message}) + await self.send({'error': True, 'message': message}) - async def _send_status_message(self, msg_type: str, id: str, message: str) -> bool: + async def _send_status_message(self, msg_type: str, id: str, message: str): """Sends a status message to the client.""" if msg_type == 'error': await self.agent_session.stop_agent_loop_for_error() - return await self.send( + await self.send( {'status_update': True, 'type': msg_type, 'id': id, 'message': message} ) From a68d39b46d04cc09b849bcb0ebd0a9f069d54dd4 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 13:16:36 -0700 Subject: [PATCH 57/91] Fire close --- openhands/server/session/manager.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 0debb19b0caf..f6b0c045482e 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -143,35 +143,35 @@ async def disconnect_from_session(self, connection_id: str): session = self.local_sessions_by_sid.get(sid) if session: if should_continue(): - asyncio.create_task(self._check_and_close_session_later(session)) + asyncio.create_task(self._close_orphaned_session_later(session)) else: - await self._check_and_close_session(session) + await self._close_orphaned_session(session, True) - async def _check_and_close_session_later(self, session: Session): + async 
def _close_orphaned_session_later(self, session: Session): # Once there have been no connections to a session for a reasonable period, we close it try: await asyncio.sleep(self.config.sandbox.close_delay) finally: # If the sleep was cancelled, we still want to close these - await self._check_and_close_session(session) + await self._close_orphaned_session(session, False) - async def _check_and_close_session(self, session: Session): + async def _close_orphaned_session(self, session: Session, force: bool): # Get local connections - has_connections_for_session = next(( + has_local_connections = next(( True for v in self.local_connection_id_to_session_id.values() if v == session.sid ), False) # If no local connections, get connections through redis - if not has_connections_for_session: - redis_client = self._get_redis_client() - if redis_client: - key = _CONNECTION_KEY.format(sid=session.sid) - connections_for_session = await redis_client.lrange(key, 0, -1) - if not connections_for_session: - await redis_client.delete(key) + redis_connections = None + redis_client = self._get_redis_client() + if redis_client: + key = _CONNECTION_KEY.format(sid=session.sid) + redis_connections = await redis_client.lrange(key, 0, -1) + if not redis_connections: + await redis_client.delete(key) # If no connections, close session - if not has_connections_for_session: + if force or (not has_local_connections and not redis_connections): session.close() self.local_sessions_by_sid.pop(session.sid, None) From 1736ebb278d509512708f91d10f368f2068115fd Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 14:05:24 -0700 Subject: [PATCH 58/91] Sending message to restart runtime --- openhands/server/session/manager.py | 23 +++++++++++++++++++---- openhands/server/session/session.py | 3 +++ 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index f6b0c045482e..0b96408b97b1 100644 --- 
a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -54,9 +54,15 @@ async def _redis_subscribe(self): if message: data = json.loads(message['data']) sid = data["sid"] - session = self.local_sessions_by_sid.get(sid) - if session: - await session.dispatch(data["data"]) + message_type = data["message_type"] + if message_type == "event": + session = self.local_sessions_by_sid.get(sid) + if session: + await session.dispatch(data["data"]) + elif message_type == "restart": + connection_id = data["connection_id"] + if self.local_connection_id_to_session_id.get(connection_id) == sid: + self.init_or_join_session(sid, connection_id, data["settings"]) except asyncio.CancelledError: return except: @@ -122,7 +128,8 @@ async def send_to_event_stream(self, connection_id: str, data: dict): if redis_client: await redis_client.publish("oh_event", json.dumps({ "sid": sid, - "data": data + "message_type": "event", + "data": data, })) return @@ -171,6 +178,14 @@ async def _close_orphaned_session(self, session: Session, force: bool): if not redis_connections: await redis_client.delete(key) + if force and redis_connections: + await redis_client.publish("oh_event", json.dumps({ + "sid": session.sid, + "message_type": "restart", + "connection_id": redis_connections[0].decode(), + "settings": session.settings, + })) + # If no connections, close session if force or (not has_local_connections and not redis_connections): session.close() diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 38307b186bbe..56ee2a3f3421 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -36,6 +36,7 @@ class Session: agent_session: AgentSession loop: asyncio.AbstractEventLoop config: AppConfig + settings: dict def __init__( self, sid: str, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None @@ -51,12 +52,14 @@ def __init__( ) self.config = config self.loop = asyncio.get_event_loop() 
+ self.settings = None def close(self): self.is_alive = False self.agent_session.close() async def initialize_agent(self, data: dict): + self.settings = data self.agent_session.event_stream.add_event( AgentStateChangedObservation('', AgentState.LOADING), EventSource.ENVIRONMENT, From 70f347ced23b6bc5b9ab7bf6ce583eca6ebce93e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 14:09:04 -0700 Subject: [PATCH 59/91] Lint fix --- openhands/server/session/session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 56ee2a3f3421..8fb98dabac71 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -36,7 +36,7 @@ class Session: agent_session: AgentSession loop: asyncio.AbstractEventLoop config: AppConfig - settings: dict + settings: dict | None def __init__( self, sid: str, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None From 13b72199c09eb1025e34271ee502421d9c2df00b Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 14:58:50 -0700 Subject: [PATCH 60/91] WIP --- openhands/server/session/manager.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 0b96408b97b1..76443bc4f415 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -62,7 +62,7 @@ async def _redis_subscribe(self): elif message_type == "restart": connection_id = data["connection_id"] if self.local_connection_id_to_session_id.get(connection_id) == sid: - self.init_or_join_session(sid, connection_id, data["settings"]) + await self.init_or_join_session(sid, connection_id, data["settings"]) except asyncio.CancelledError: return except: @@ -175,14 +175,21 @@ async def _close_orphaned_session(self, session: Session, force: bool): if redis_client: key = 
_CONNECTION_KEY.format(sid=session.sid) redis_connections = await redis_client.lrange(key, 0, -1) + redis_connections = [ + c.decode() for c in redis_connections + ] if not redis_connections: await redis_client.delete(key) - + redis_connections = [ + c for c in redis_connections + if c not in self.local_connection_id_to_session_id + ] + if force and redis_connections: await redis_client.publish("oh_event", json.dumps({ "sid": session.sid, "message_type": "restart", - "connection_id": redis_connections[0].decode(), + "connection_id": redis_connections[0], "settings": session.settings, })) From b7d0e4bf34586c526ff3205d53ecdc1fabbc3140 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Thu, 21 Nov 2024 15:32:06 -0700 Subject: [PATCH 61/91] WIP --- openhands/server/listen.py | 56 -------------------------------------- 1 file changed, 56 deletions(-) diff --git a/openhands/server/listen.py b/openhands/server/listen.py index 06f0d6d00bf4..699778692872 100644 --- a/openhands/server/listen.py +++ b/openhands/server/listen.py @@ -917,62 +917,6 @@ async def connect(connection_id: str, environ): @sio.event async def oh_action(connection_id: str, data: dict): - """WebSocket endpoint for receiving events from the client (i.e., the browser). - Once connected, the client can send various actions: - - Initialize the agent: - session management, and event streaming. - ```json - {"action": "initialize", "args": {"LLM_MODEL": "ollama/llama3", "AGENT": "CodeActAgent", "LANGUAGE": "en", "LLM_API_KEY": "ollama"}} - - Args: - ``` - websocket (WebSocket): The WebSocket connection object. 
- - Start a new development task: - ```json - {"action": "start", "args": {"task": "write a bash script that prints hello"}} - ``` - - Send a message: - ```json - {"action": "message", "args": {"content": "Hello, how are you?", "image_urls": ["base64_url1", "base64_url2"]}} - ``` - - Write contents to a file: - ```json - {"action": "write", "args": {"path": "./greetings.txt", "content": "Hello, OpenHands?"}} - ``` - - Read the contents of a file: - ```json - {"action": "read", "args": {"path": "./greetings.txt"}} - ``` - - Run a command: - ```json - {"action": "run", "args": {"command": "ls -l", "thought": "", "confirmation_state": "confirmed"}} - ``` - - Run an IPython command: - ```json - {"action": "run_ipython", "args": {"command": "print('Hello, IPython!')"}} - ``` - - Open a web page: - ```json - {"action": "browse", "args": {"url": "https://arxiv.org/html/2402.01030v2"}} - ``` - - Add a task to the root_task: - ```json - {"action": "add_task", "args": {"task": "Implement feature X"}} - ``` - - Update a task in the root_task: - ```json - {"action": "modify_task", "args": {"id": "0", "state": "in_progress", "thought": ""}} - ``` - - Change the agent's state: - ```json - {"action": "change_agent_state", "args": {"state": "paused"}} - ``` - - Finish the task: - ```json - {"action": "finish", "args": {}} - ``` - """ - # If it's an init, we do it here. 
action = data.get('action', '') if action == ActionType.INIT: From 96a07fc66c6cbaf0a7ebbd8ca0e2854cf9179aac Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 08:17:29 -0700 Subject: [PATCH 62/91] Better debugging --- openhands/runtime/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/runtime/base.py b/openhands/runtime/base.py index 74891a7d52b0..ea93410c6836 100644 --- a/openhands/runtime/base.py +++ b/openhands/runtime/base.py @@ -196,7 +196,7 @@ async def on_event(self, event: Event) -> None: e, RuntimeDisconnectedError ): err_id = 'STATUS$ERROR_RUNTIME_DISCONNECTED' - self.log('error', f'Unexpected error while running action {e}') + logger.error('Unexpected error while running action', exc_info=True, stack_info=True) self.log('error', f'Problematic action: {str(event)}') self.send_error_message(err_id, str(e)) self.close() From 98789d59736fc38cbd64e564e0051d51087aeed9 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 08:21:36 -0700 Subject: [PATCH 63/91] Lint fix --- openhands/runtime/base.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openhands/runtime/base.py b/openhands/runtime/base.py index ea93410c6836..87d42cbb0d4f 100644 --- a/openhands/runtime/base.py +++ b/openhands/runtime/base.py @@ -196,7 +196,11 @@ async def on_event(self, event: Event) -> None: e, RuntimeDisconnectedError ): err_id = 'STATUS$ERROR_RUNTIME_DISCONNECTED' - logger.error('Unexpected error while running action', exc_info=True, stack_info=True) + logger.error( + 'Unexpected error while running action', + exc_info=True, + stack_info=True, + ) self.log('error', f'Problematic action: {str(event)}') self.send_error_message(err_id, str(e)) self.close() From 108765b58a24dc35fc46fdb564e070c72e30430e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 08:25:15 -0700 Subject: [PATCH 64/91] Lint fix --- openhands/runtime/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/openhands/runtime/base.py b/openhands/runtime/base.py index 87d42cbb0d4f..4c5f577ec133 100644 --- a/openhands/runtime/base.py +++ b/openhands/runtime/base.py @@ -200,7 +200,7 @@ async def on_event(self, event: Event) -> None: 'Unexpected error while running action', exc_info=True, stack_info=True, - ) + ) self.log('error', f'Problematic action: {str(event)}') self.send_error_message(err_id, str(e)) self.close() From 4f30b383421d9816a450d526ffb2f149dae17538 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 12:01:46 -0700 Subject: [PATCH 65/91] Fix for list including current directory --- openhands/storage/s3.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openhands/storage/s3.py b/openhands/storage/s3.py index 3823db17c705..207751e98129 100644 --- a/openhands/storage/s3.py +++ b/openhands/storage/s3.py @@ -30,6 +30,8 @@ def read(self, path: str) -> str: raise FileNotFoundError(f'Failed to read from S3 at path {path}: {e}') def list(self, path: str) -> list[str]: + if path and path != "/" and not path.endswith("/"): + path += "/" try: return [ obj.object_name for obj in self.client.list_objects(self.bucket, path) From ef1c78ae4588d7a9b6bfacffe26eed0e0b1d3095 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 12:06:05 -0700 Subject: [PATCH 66/91] Lint fixes --- openhands/storage/s3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openhands/storage/s3.py b/openhands/storage/s3.py index 207751e98129..a9f187537973 100644 --- a/openhands/storage/s3.py +++ b/openhands/storage/s3.py @@ -30,8 +30,8 @@ def read(self, path: str) -> str: raise FileNotFoundError(f'Failed to read from S3 at path {path}: {e}') def list(self, path: str) -> list[str]: - if path and path != "/" and not path.endswith("/"): - path += "/" + if path and path != '/' and not path.endswith('/'): + path += '/' try: return [ obj.object_name for obj in self.client.list_objects(self.bucket, path) From 
94682d9ef0a34fd1d65a520ecd8d675522bcf2d9 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 13:04:43 -0700 Subject: [PATCH 67/91] More debugging --- openhands/server/session/manager.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 76443bc4f415..cb775e46ca1a 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -94,18 +94,22 @@ async def init_or_join_session(self, sid: str, connection_id: str, data: dict): # If we have a local session running, use that session = self.local_sessions_by_sid.get(sid) if session: + logger.info(f'found_local_session:{sid}') return session.agent_session.event_stream # If there is a remote session running, mark a connection to that redis_client = self._get_redis_client() if redis_client: num_connections = await redis_client.rpush(_CONNECTION_KEY.format(sid=sid), connection_id) + logger.info(f'num_redis_connections:{sid}:{num_connections}') # More than one remote connection implies session is already running remotely... 
if num_connections != 1: + logger.info('session_running_elsewhere_in_cluster:{sid}') event_stream = EventStream(sid, self.file_store) return event_stream # Start a new local session + logger.info('start_new_local_session:{sid}') session = Session( sid=sid, file_store=self.file_store, config=self.config, sio=self.sio ) @@ -145,10 +149,12 @@ async def disconnect_from_session(self, connection_id: str): # Disconnect from redis if present redis_client = self._get_redis_client() if redis_client: + logger.info('disconnect_connection_from_session:{connection_id}:{sid}') await redis_client.lrem(_CONNECTION_KEY.format(sid=sid), 0, connection_id) session = self.local_sessions_by_sid.get(sid) if session: + logger.info('close_session:{connection_id}:{sid}') if should_continue(): asyncio.create_task(self._close_orphaned_session_later(session)) else: @@ -178,6 +184,7 @@ async def _close_orphaned_session(self, session: Session, force: bool): redis_connections = [ c.decode() for c in redis_connections ] + logger.info('close_orphaned_session:{redis_connections}') if not redis_connections: await redis_client.delete(key) redis_connections = [ From 174219d08c1aae200e131fa8c7fabe04a74203a2 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 15:31:00 -0700 Subject: [PATCH 68/91] LOG ALL THE THINGS!!! --- openhands/server/session/manager.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index cb775e46ca1a..a86aa35d9c6b 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -104,12 +104,12 @@ async def init_or_join_session(self, sid: str, connection_id: str, data: dict): logger.info(f'num_redis_connections:{sid}:{num_connections}') # More than one remote connection implies session is already running remotely... 
if num_connections != 1: - logger.info('session_running_elsewhere_in_cluster:{sid}') + logger.info(f'session_running_elsewhere_in_cluster:{sid}') event_stream = EventStream(sid, self.file_store) return event_stream # Start a new local session - logger.info('start_new_local_session:{sid}') + logger.info(f'start_new_local_session:{sid}') session = Session( sid=sid, file_store=self.file_store, config=self.config, sio=self.sio ) @@ -149,12 +149,12 @@ async def disconnect_from_session(self, connection_id: str): # Disconnect from redis if present redis_client = self._get_redis_client() if redis_client: - logger.info('disconnect_connection_from_session:{connection_id}:{sid}') + logger.info(f'disconnect_connection_from_session:{connection_id}:{sid}') await redis_client.lrem(_CONNECTION_KEY.format(sid=sid), 0, connection_id) session = self.local_sessions_by_sid.get(sid) if session: - logger.info('close_session:{connection_id}:{sid}') + logger.info(f'close_session:{connection_id}:{sid}') if should_continue(): asyncio.create_task(self._close_orphaned_session_later(session)) else: @@ -184,15 +184,17 @@ async def _close_orphaned_session(self, session: Session, force: bool): redis_connections = [ c.decode() for c in redis_connections ] - logger.info('close_orphaned_session:{redis_connections}') + logger.info(f'close_orphaned_session:{redis_connections}') if not redis_connections: await redis_client.delete(key) redis_connections = [ c for c in redis_connections if c not in self.local_connection_id_to_session_id ] + logger.info(f'close_orphaned_session:2:{redis_connections}') if force and redis_connections: + logger.info(f'transferring_session') await redis_client.publish("oh_event", json.dumps({ "sid": session.sid, "message_type": "restart", @@ -202,5 +204,6 @@ async def _close_orphaned_session(self, session: Session, force: bool): # If no connections, close session if force or (not has_local_connections and not redis_connections): + logger.info(f'do_close_session') 
session.close() self.local_sessions_by_sid.pop(session.sid, None) From dd5fc6f5edac83fd245d233aa50a765f89c31e64 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 16:05:49 -0700 Subject: [PATCH 69/91] Reorder disconnect --- openhands/server/session/manager.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index a86aa35d9c6b..1b2ab5b59b39 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -145,30 +145,24 @@ async def disconnect_from_session(self, connection_id: str): # This can occur if the init action was never run. logger.warning(f'disconnect_from_uninitialized_session:{connection_id}') return - - # Disconnect from redis if present - redis_client = self._get_redis_client() - if redis_client: - logger.info(f'disconnect_connection_from_session:{connection_id}:{sid}') - await redis_client.lrem(_CONNECTION_KEY.format(sid=sid), 0, connection_id) session = self.local_sessions_by_sid.get(sid) if session: logger.info(f'close_session:{connection_id}:{sid}') if should_continue(): - asyncio.create_task(self._close_orphaned_session_later(session)) + asyncio.create_task(self._cleanup_session_later(session, connection_id)) else: - await self._close_orphaned_session(session, True) + await self._cleanup_session(session, connection_id, True) - async def _close_orphaned_session_later(self, session: Session): + async def _cleanup_session_later(self, session: Session, connection_id: str): # Once there have been no connections to a session for a reasonable period, we close it try: await asyncio.sleep(self.config.sandbox.close_delay) finally: # If the sleep was cancelled, we still want to close these - await self._close_orphaned_session(session, False) + await self._cleanup_session(session, connection_id, False) - async def _close_orphaned_session(self, session: Session, force: bool): + async def 
_cleanup_session(self, session: Session, connection_id: str, force: bool): # Get local connections has_local_connections = next(( True for v in self.local_connection_id_to_session_id.values() @@ -180,6 +174,7 @@ async def _close_orphaned_session(self, session: Session, force: bool): redis_client = self._get_redis_client() if redis_client: key = _CONNECTION_KEY.format(sid=session.sid) + await redis_client.lrem(key, 0, connection_id) redis_connections = await redis_client.lrange(key, 0, -1) redis_connections = [ c.decode() for c in redis_connections From 0d5dda065eec1f9fd2df6eeca45321a0d8ee94ca Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 17:25:31 -0700 Subject: [PATCH 70/91] More logging --- openhands/server/session/manager.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 1b2ab5b59b39..855a6f36410b 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -60,8 +60,10 @@ async def _redis_subscribe(self): if session: await session.dispatch(data["data"]) elif message_type == "restart": + logger.info("got_transfer_request") connection_id = data["connection_id"] if self.local_connection_id_to_session_id.get(connection_id) == sid: + logger.info("transferring_session_to_local") await self.init_or_join_session(sid, connection_id, data["settings"]) except asyncio.CancelledError: return From 5915a31ef8f450e5036d41a277dd2f039cc2e77a Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 22 Nov 2024 19:58:33 -0700 Subject: [PATCH 71/91] More logs --- openhands/server/session/manager.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 855a6f36410b..a595d8577d99 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -53,6 +53,7 @@ async def _redis_subscribe(self): message = await 
pubsub.get_message(ignore_subscribe_messages=True, timeout=5) if message: data = json.loads(message['data']) + logger.info(f"got_published_message:{message}") sid = data["sid"] message_type = data["message_type"] if message_type == "event": From 512a139adf0a108253dfd19197e3313371bc9868 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Sat, 23 Nov 2024 08:06:33 -0700 Subject: [PATCH 72/91] Now querying cluster before starting session --- openhands/server/session/manager.py | 67 ++++++++++++++++++----------- 1 file changed, 43 insertions(+), 24 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index a595d8577d99..4537d794fe04 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -2,6 +2,7 @@ import json import time from dataclasses import dataclass, field +from typing import Callable import socketio @@ -12,9 +13,11 @@ from openhands.server.session.conversation import Conversation from openhands.server.session.session import ROOM_KEY, Session from openhands.storage.files import FileStore +from openhands.utils.async_utils import wait_all from openhands.utils.shutdown_listener import should_continue _CONNECTION_KEY = "oh_session:{sid}" +_SESSION_RUNNING_TIMEOUT = 1.5 @dataclass @@ -25,6 +28,7 @@ class SessionManager: local_sessions_by_sid: dict[str, Session] = field(default_factory=dict) local_connection_id_to_session_id: dict[str, str] = field(default_factory=dict) _redis_listen_task: asyncio.Task | None = None + _session_is_running_flags: dict[str, asyncio.Event] = field(default_factory=dict) async def __aenter__(self): redis_client = self._get_redis_client() @@ -60,12 +64,17 @@ async def _redis_subscribe(self): session = self.local_sessions_by_sid.get(sid) if session: await session.dispatch(data["data"]) - elif message_type == "restart": - logger.info("got_transfer_request") - connection_id = data["connection_id"] - if self.local_connection_id_to_session_id.get(connection_id) 
== sid: - logger.info("transferring_session_to_local") - await self.init_or_join_session(sid, connection_id, data["settings"]) + elif message_type == "is_session_running": + session = self.local_sessions_by_sid.get(sid) + if session: + await redis_client.publish("oh_event", json.dumps({ + "sid": sid, + "message_type": "session_is_running" + })) + elif message_type == "session_is_running": + flag = self._session_is_running_flags.get(sid) + if flag: + flag.set() except asyncio.CancelledError: return except: @@ -100,16 +109,35 @@ async def init_or_join_session(self, sid: str, connection_id: str, data: dict): logger.info(f'found_local_session:{sid}') return session.agent_session.event_stream - # If there is a remote session running, mark a connection to that + # If there is a remote session running, retrieve existing events for that redis_client = self._get_redis_client() - if redis_client: - num_connections = await redis_client.rpush(_CONNECTION_KEY.format(sid=sid), connection_id) - logger.info(f'num_redis_connections:{sid}:{num_connections}') - # More than one remote connection implies session is already running remotely... - if num_connections != 1: - logger.info(f'session_running_elsewhere_in_cluster:{sid}') - event_stream = EventStream(sid, self.file_store) - return event_stream + if redis_client and await self._is_session_running_in_cluster(sid): + return EventStream(sid, self.file_store) + + return await self.start_local_session(sid, data) + + async def _is_session_running_in_cluster(self, sid: str) -> bool: + """ As the rest of the cluster if a session is running. 
Wait a for a short timeout for a reply """ + # Create a flag for the callback + flag = asyncio.Event() + self._session_is_running_flags[sid] = flag + try: + await self._get_redis_client().publish("oh_event", json.dumps({ + "sid": sid, + "message_type": "is_session_running", + })) + async with asyncio.timeout(_SESSION_RUNNING_TIMEOUT): + await flag.wait() + + result = flag.is_set() + return result + except TimeoutError: + # Nobody replied in time + return False + finally: + self._session_is_running_flags.pop(sid) + + async def start_local_session(self, sid: str, data: dict): # Start a new local session logger.info(f'start_new_local_session:{sid}') @@ -191,15 +219,6 @@ async def _cleanup_session(self, session: Session, connection_id: str, force: bo ] logger.info(f'close_orphaned_session:2:{redis_connections}') - if force and redis_connections: - logger.info(f'transferring_session') - await redis_client.publish("oh_event", json.dumps({ - "sid": session.sid, - "message_type": "restart", - "connection_id": redis_connections[0], - "settings": session.settings, - })) - # If no connections, close session if force or (not has_local_connections and not redis_connections): logger.info(f'do_close_session') From cecac3fa202b5cdca6c4815264b0c127db477479 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Sat, 23 Nov 2024 08:38:35 -0700 Subject: [PATCH 73/91] Announcing session closing (When shutdown is graceful) --- openhands/server/session/manager.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 4537d794fe04..70bada94e557 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -65,6 +65,7 @@ async def _redis_subscribe(self): if session: await session.dispatch(data["data"]) elif message_type == "is_session_running": + # Another node in the cluster is asking if the current node is running the session given. 
session = self.local_sessions_by_sid.get(sid) if session: await redis_client.publish("oh_event", json.dumps({ @@ -75,6 +76,13 @@ async def _redis_subscribe(self): flag = self._session_is_running_flags.get(sid) if flag: flag.set() + elif message_type == "session_closing": + logger.info(f"session_closing:{sid}") + for connection_id, local_sid in self.local_connection_id_to_session_id.items(): + if sid == local_sid: + logger.warning('local_connection_to_closing_session') + + except asyncio.CancelledError: return except: @@ -221,6 +229,14 @@ async def _cleanup_session(self, session: Session, connection_id: str, force: bo # If no connections, close session if force or (not has_local_connections and not redis_connections): + + # We alert the cluster in case they are interested + if redis_client: + await redis_client.publish("oh_event", json.dumps({ + "sid": session.sid, + "message_type": "session_closing" + })) + logger.info(f'do_close_session') session.close() self.local_sessions_by_sid.pop(session.sid, None) From 2802a8928a62ceb4b5b7f0ddf423ecd906a43518 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 08:52:22 -0700 Subject: [PATCH 74/91] Unit tests for manager --- openhands/server/session/manager.py | 220 +++++++++++++++++----------- tests/unit/test_manager.py | 174 ++++++++++++++++++++++ 2 files changed, 309 insertions(+), 85 deletions(-) create mode 100644 tests/unit/test_manager.py diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index 70bada94e557..dd578be6b613 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -2,7 +2,6 @@ import json import time from dataclasses import dataclass, field -from typing import Callable import socketio @@ -13,11 +12,10 @@ from openhands.server.session.conversation import Conversation from openhands.server.session.session import ROOM_KEY, Session from openhands.storage.files import FileStore -from openhands.utils.async_utils import 
wait_all from openhands.utils.shutdown_listener import should_continue -_CONNECTION_KEY = "oh_session:{sid}" -_SESSION_RUNNING_TIMEOUT = 1.5 +_REDIS_POLL_TIMEOUT = 1.5 +_CHECK_ALIVE_INTERVAL = 15 @dataclass @@ -27,8 +25,10 @@ class SessionManager: file_store: FileStore local_sessions_by_sid: dict[str, Session] = field(default_factory=dict) local_connection_id_to_session_id: dict[str, str] = field(default_factory=dict) + _last_alive_timestamps: dict[str, float] = field(default_factory=dict) _redis_listen_task: asyncio.Task | None = None _session_is_running_flags: dict[str, asyncio.Event] = field(default_factory=dict) + _has_remote_connections_flags: dict[str, asyncio.Event] = field(default_factory=dict) async def __aenter__(self): redis_client = self._get_redis_client() @@ -56,37 +56,58 @@ async def _redis_subscribe(self): try: message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=5) if message: - data = json.loads(message['data']) - logger.info(f"got_published_message:{message}") - sid = data["sid"] - message_type = data["message_type"] - if message_type == "event": - session = self.local_sessions_by_sid.get(sid) - if session: - await session.dispatch(data["data"]) - elif message_type == "is_session_running": - # Another node in the cluster is asking if the current node is running the session given. 
- session = self.local_sessions_by_sid.get(sid) - if session: - await redis_client.publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "session_is_running" - })) - elif message_type == "session_is_running": - flag = self._session_is_running_flags.get(sid) - if flag: - flag.set() - elif message_type == "session_closing": - logger.info(f"session_closing:{sid}") - for connection_id, local_sid in self.local_connection_id_to_session_id.items(): - if sid == local_sid: - logger.warning('local_connection_to_closing_session') - - + await self._process_message(message) except asyncio.CancelledError: return except: - logger.warning("error_reading_from_redis", exc_info=True, stack_info=True) + try: + asyncio.get_running_loop() + logger.warning("error_reading_from_redis", exc_info=True, stack_info=True) + except RuntimeError: + return # Loop has been shut down + + async def _process_message(self, message: dict): + data = json.loads(message['data']) + logger.info(f"got_published_message:{message}") + sid = data["sid"] + message_type = data["message_type"] + if message_type == "event": + session = self.local_sessions_by_sid.get(sid) + if session: + await session.dispatch(data["data"]) + elif message_type == "is_session_running": + # Another node in the cluster is asking if the current node is running the session given. 
+ session = self.local_sessions_by_sid.get(sid) + if session: + await self._get_redis_client().publish("oh_event", json.dumps({ + "sid": sid, + "message_type": "session_is_running" + })) + elif message_type == "session_is_running": + self._last_alive_timestamps[sid] = time.time() + flag = self._session_is_running_flags.get(sid) + if flag: + flag.set() + elif message_type == "has_remote_connections_query": + # Another node in the cluster is asking if the current node is connected to a session + required = sid in self.local_connection_id_to_session_id.values() + if required: + await self._get_redis_client().publish("oh_event", json.dumps({ + "sid": sid, + "message_type": "has_remote_connections_response" + })) + elif message_type == "has_remote_connections_response": + flag = self._has_remote_connections_flags.get(sid) + if flag: + flag.set() + elif message_type == "session_closing": + # Session closing event - We only get this in the event of graceful shutdown, + # which can't be guaranteed - nodes can simply vanish unexpectedly! 
+ logger.info(f"session_closing:{sid}") + for connection_id, local_sid in self.local_connection_id_to_session_id.items(): + if sid == local_sid: + logger.warning('local_connection_to_closing_session:{connection_id}:{sid}') + await self.sio.disconnect(connection_id) async def attach_to_conversation(self, sid: str) -> Conversation | None: start_time = time.time() @@ -134,7 +155,7 @@ async def _is_session_running_in_cluster(self, sid: str) -> bool: "sid": sid, "message_type": "is_session_running", })) - async with asyncio.timeout(_SESSION_RUNNING_TIMEOUT): + async with asyncio.timeout(_REDIS_POLL_TIMEOUT): await flag.wait() result = flag.is_set() @@ -145,8 +166,28 @@ async def _is_session_running_in_cluster(self, sid: str) -> bool: finally: self._session_is_running_flags.pop(sid) - async def start_local_session(self, sid: str, data: dict): + async def _has_remote_connections(self, sid: str) -> bool: + """ As the rest of the cluster if they still want this session running. Wait a for a short timeout for a reply """ + # Create a flag for the callback + flag = asyncio.Event() + self._has_remote_connections_flags[sid] = flag + try: + await self._get_redis_client().publish("oh_event", json.dumps({ + "sid": sid, + "message_type": "has_remote_connections_query", + })) + async with asyncio.timeout(_REDIS_POLL_TIMEOUT): + await flag.wait() + + result = flag.is_set() + return result + except TimeoutError: + # Nobody replied in time + return False + finally: + self._has_remote_connections_flags.pop(sid) + async def start_local_session(self, sid: str, data: dict): # Start a new local session logger.info(f'start_new_local_session:{sid}') session = Session( @@ -156,7 +197,6 @@ async def start_local_session(self, sid: str, data: dict): await session.initialize_agent(data) return session.agent_session.event_stream - async def send_to_event_stream(self, connection_id: str, data: dict): # If there is a local session running, send to that sid = 
self.local_connection_id_to_session_id.get(connection_id) @@ -166,15 +206,19 @@ async def send_to_event_stream(self, connection_id: str, data: dict): await session.dispatch(data) return - # If there is a remote session running, send to that redis_client = self._get_redis_client() if redis_client: - await redis_client.publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "event", - "data": data, - })) - return + # If we have a recent report that the session is alive in another pod + last_alive_at = self._last_alive_timestamps.get(sid) or 0 + next_alive_check = last_alive_at + _CHECK_ALIVE_INTERVAL + if next_alive_check > time.time() or self._is_session_running_in_cluster(): + # Send the event to the other pod + await redis_client.publish("oh_event", json.dumps({ + "sid": sid, + "message_type": "event", + "data": data, + })) + return raise RuntimeError(f'no_connected_session:{sid}') @@ -189,54 +233,60 @@ async def disconnect_from_session(self, connection_id: str): if session: logger.info(f'close_session:{connection_id}:{sid}') if should_continue(): - asyncio.create_task(self._cleanup_session_later(session, connection_id)) + asyncio.create_task(self._cleanup_session_later(session)) else: - await self._cleanup_session(session, connection_id, True) + await self._close_session(session) - async def _cleanup_session_later(self, session: Session, connection_id: str): + async def _cleanup_session_later(self, session: Session): # Once there have been no connections to a session for a reasonable period, we close it try: await asyncio.sleep(self.config.sandbox.close_delay) finally: # If the sleep was cancelled, we still want to close these - await self._cleanup_session(session, connection_id, False) + await self._cleanup_session(session) - async def _cleanup_session(self, session: Session, connection_id: str, force: bool): - # Get local connections - has_local_connections = next(( - True for v in self.local_connection_id_to_session_id.values() - if v == session.sid - 
), False) - - # If no local connections, get connections through redis - redis_connections = None - redis_client = self._get_redis_client() - if redis_client: - key = _CONNECTION_KEY.format(sid=session.sid) - await redis_client.lrem(key, 0, connection_id) - redis_connections = await redis_client.lrange(key, 0, -1) - redis_connections = [ - c.decode() for c in redis_connections - ] - logger.info(f'close_orphaned_session:{redis_connections}') - if not redis_connections: - await redis_client.delete(key) - redis_connections = [ - c for c in redis_connections - if c not in self.local_connection_id_to_session_id - ] - logger.info(f'close_orphaned_session:2:{redis_connections}') - - # If no connections, close session - if force or (not has_local_connections and not redis_connections): - - # We alert the cluster in case they are interested - if redis_client: - await redis_client.publish("oh_event", json.dumps({ - "sid": session.sid, - "message_type": "session_closing" - })) - - logger.info(f'do_close_session') - session.close() - self.local_sessions_by_sid.pop(session.sid, None) + async def _cleanup_session(self, session: Session): + # Get local connections + has_local_connections = next(( + True for v in self.local_connection_id_to_session_id.values() + if v == session.sid + ), False) + if has_local_connections: + return False + + # If no local connections, get connections through redis + redis_client = self._get_redis_client() + if redis_client and self._has_remote_connections(session.sid): + return False + + # We alert the cluster in case they are interested + if redis_client: + await redis_client.publish("oh_event", json.dumps({ + "sid": session.sid, + "message_type": "session_closing" + })) + + self._close_session() + + async def _close_session(self, session: Session): + logger.info(f'_close_session:{session.sid}') + + # Clear up local variables + connection_ids_to_remove = list( + connection_id for connection_id, sid in self.local_connection_id_to_session_id.items() 
+ if sid == session.sid + ) + for connnnection_id in connection_ids_to_remove: + self.local_connection_id_to_session_id.pop(connnnection_id, None) + + self.local_sessions_by_sid.pop(session.sid, None) + + # We alert the cluster in case they are interested + redis_client = self._get_redis_client() + if redis_client: + await redis_client.publish("oh_event", json.dumps({ + "sid": session.sid, + "message_type": "session_closing" + })) + + session.close() diff --git a/tests/unit/test_manager.py b/tests/unit/test_manager.py new file mode 100644 index 000000000000..b57d2ab3bfc5 --- /dev/null +++ b/tests/unit/test_manager.py @@ -0,0 +1,174 @@ + + +import asyncio +import json +import pytest +from dataclasses import dataclass +from unittest.mock import AsyncMock, MagicMock, patch + +from openhands.core.config.app_config import AppConfig +from openhands.server.session.manager import SessionManager +from openhands.storage.memory import InMemoryFileStore + + +@dataclass +class GetMessageMock: + message: dict | None + sleep_time: int = 0.01 + + async def get_message(self, **kwargs): + await asyncio.sleep(self.sleep_time) + return {"data":json.dumps(self.message)} + + +def get_mock_sio(get_message: GetMessageMock | None = None): + sio = MagicMock() + sio.enter_room = AsyncMock() + sio.manager.redis = MagicMock() + sio.manager.redis.publish = AsyncMock() + pubsub = AsyncMock() + pubsub.get_message = (get_message or GetMessageMock(None)).get_message + sio.manager.redis.pubsub.return_value = pubsub + return sio + + +@pytest.mark.asyncio +async def test_session_not_running_in_cluster(): + sio = get_mock_sio() + with ( + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + result = await session_manager._is_session_running_in_cluster("non-existant-session") + assert result == False + assert sio.manager.redis.publish.await_count == 1 + 
sio.manager.redis.publish.assert_called_once_with( + 'oh_event', '{"sid": "non-existant-session", "message_type": "is_session_running"}' + ) + + +@pytest.mark.asyncio +async def test_session_is_running_in_cluster(): + sio = get_mock_sio(GetMessageMock({"sid": "existing-session", "message_type": "session_is_running"})) + with ( + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.02), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + result = await session_manager._is_session_running_in_cluster("existing-session") + assert result == True + assert sio.manager.redis.publish.await_count == 1 + sio.manager.redis.publish.assert_called_once_with( + 'oh_event', '{"sid": "existing-session", "message_type": "is_session_running"}' + ) + + +@pytest.mark.asyncio +async def test_init_new_local_session(): + session_instance = AsyncMock() + session_instance.agent_session = MagicMock() + mock_session = MagicMock() + mock_session.return_value = session_instance + sio = get_mock_sio() + is_session_running_in_cluster_mock = AsyncMock() + is_session_running_in_cluster_mock.return_value = False + with ( + patch("openhands.server.session.manager.Session", mock_session), + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), + patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), + patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + await session_manager.init_or_join_session("new-session-id", "new-session-id", {"type": "mock-settings"}) + assert session_instance.initialize_agent.call_count == 1 + assert sio.enter_room.await_count == 1 + + +@pytest.mark.asyncio +async def test_join_local_session(): + session_instance = AsyncMock() + session_instance.agent_session = MagicMock() + mock_session = MagicMock() + 
mock_session.return_value = session_instance + sio = get_mock_sio() + is_session_running_in_cluster_mock = AsyncMock() + is_session_running_in_cluster_mock.return_value = False + with ( + patch("openhands.server.session.manager.Session", mock_session), + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), + patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), + patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + # First call initializes + await session_manager.init_or_join_session("new-session-id", "new-session-id", {"type": "mock-settings"}) + # Second call joins + await session_manager.init_or_join_session("new-session-id", "extra-connection-id", {"type": "mock-settings"}) + assert session_instance.initialize_agent.call_count == 1 + assert sio.enter_room.await_count == 2 + + +@pytest.mark.asyncio +async def test_join_cluster_session(): + session_instance = AsyncMock() + session_instance.agent_session = MagicMock() + mock_session = MagicMock() + mock_session.return_value = session_instance + sio = get_mock_sio() + is_session_running_in_cluster_mock = AsyncMock() + is_session_running_in_cluster_mock.return_value = True + with ( + patch("openhands.server.session.manager.Session", mock_session), + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), + patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), + patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + # First call initializes + await session_manager.init_or_join_session("new-session-id", "new-session-id", {"type": "mock-settings"}) + assert 
session_instance.initialize_agent.call_count == 0 + assert sio.enter_room.await_count == 1 + + +@pytest.mark.asyncio +async def test_add_to_local_event_stream(): + session_instance = AsyncMock() + session_instance.agent_session = MagicMock() + mock_session = MagicMock() + mock_session.return_value = session_instance + sio = get_mock_sio() + is_session_running_in_cluster_mock = AsyncMock() + is_session_running_in_cluster_mock.return_value = False + with ( + patch("openhands.server.session.manager.Session", mock_session), + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), + patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), + patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + await session_manager.init_or_join_session("new-session-id", "connection-id", {"type": "mock-settings"}) + await session_manager.send_to_event_stream("connection-id", {"event_type": "some_event"}) + session_instance.dispatch.assert_called_once_with({"event_type": "some_event"}) + + +@pytest.mark.asyncio +async def test_add_to_cluster_event_stream(): + session_instance = AsyncMock() + session_instance.agent_session = MagicMock() + mock_session = MagicMock() + mock_session.return_value = session_instance + sio = get_mock_sio() + is_session_running_in_cluster_mock = AsyncMock() + is_session_running_in_cluster_mock.return_value = True + with ( + patch("openhands.server.session.manager.Session", mock_session), + patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), + patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), + patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), + ): + async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as 
session_manager: + await session_manager.init_or_join_session("new-session-id", "connection-id", {"type": "mock-settings"}) + await session_manager.send_to_event_stream("connection-id", {"event_type": "some_event"}) + assert sio.manager.redis.publish.await_count == 1 + sio.manager.redis.publish.assert_called_once_with( + 'oh_event', '{"sid": "new-session-id", "message_type": "event", "data": {"event_type": "some_event"}}' + ) From 8325cfb939bf4cd2a87e0f113ca4bbce8c204ae0 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 09:23:18 -0700 Subject: [PATCH 75/91] WIP --- tests/unit/test_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_manager.py b/tests/unit/test_manager.py index b57d2ab3bfc5..d47044ba4e46 100644 --- a/tests/unit/test_manager.py +++ b/tests/unit/test_manager.py @@ -40,7 +40,7 @@ async def test_session_not_running_in_cluster(): ): async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: result = await session_manager._is_session_running_in_cluster("non-existant-session") - assert result == False + assert result is False assert sio.manager.redis.publish.await_count == 1 sio.manager.redis.publish.assert_called_once_with( 'oh_event', '{"sid": "non-existant-session", "message_type": "is_session_running"}' @@ -55,7 +55,7 @@ async def test_session_is_running_in_cluster(): ): async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: result = await session_manager._is_session_running_in_cluster("existing-session") - assert result == True + assert result is True assert sio.manager.redis.publish.await_count == 1 sio.manager.redis.publish.assert_called_once_with( 'oh_event', '{"sid": "existing-session", "message_type": "is_session_running"}' From fca488ba67356e9afedc087ae7dc8a9b6b017d4a Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 09:30:04 -0700 Subject: [PATCH 76/91] WIP --- openhands/server/session/manager.py | 20 
+++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index dd578be6b613..a3e490916466 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -200,18 +200,20 @@ async def start_local_session(self, sid: str, data: dict): async def send_to_event_stream(self, connection_id: str, data: dict): # If there is a local session running, send to that sid = self.local_connection_id_to_session_id.get(connection_id) - if sid: - session = self.local_sessions_by_sid.get(sid) - if session: - await session.dispatch(data) - return - + if not sid: + raise RuntimeError(f'no_connected_session:{connection_id}') + + session = self.local_sessions_by_sid.get(sid) + if session: + await session.dispatch(data) + return + redis_client = self._get_redis_client() if redis_client: # If we have a recent report that the session is alive in another pod last_alive_at = self._last_alive_timestamps.get(sid) or 0 next_alive_check = last_alive_at + _CHECK_ALIVE_INTERVAL - if next_alive_check > time.time() or self._is_session_running_in_cluster(): + if next_alive_check > time.time() or self._is_session_running_in_cluster(sid): # Send the event to the other pod await redis_client.publish("oh_event", json.dumps({ "sid": sid, @@ -220,7 +222,7 @@ async def send_to_event_stream(self, connection_id: str, data: dict): })) return - raise RuntimeError(f'no_connected_session:{sid}') + raise RuntimeError(f'no_connected_session:{connection_id}:{sid}') async def disconnect_from_session(self, connection_id: str): sid = self.local_connection_id_to_session_id.pop(connection_id, None) @@ -266,7 +268,7 @@ async def _cleanup_session(self, session: Session): "message_type": "session_closing" })) - self._close_session() + await self._close_session(session) async def _close_session(self, session: Session): logger.info(f'_close_session:{session.sid}') From 
d4ccbd13bc8ad43da68d0f248f02a6642df4f2b3 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 09:33:24 -0700 Subject: [PATCH 77/91] Ruff --- openhands/server/session/agent_session.py | 1 - openhands/server/session/manager.py | 175 ++++++++++++--------- openhands/server/session/session.py | 12 +- tests/unit/test_manager.py | 177 +++++++++++++++------- 4 files changed, 236 insertions(+), 129 deletions(-) diff --git a/openhands/server/session/agent_session.py b/openhands/server/session/agent_session.py index f0fdf247084a..2dc0ebace229 100644 --- a/openhands/server/session/agent_session.py +++ b/openhands/server/session/agent_session.py @@ -139,7 +139,6 @@ async def _close(self): end_state.save_to_session(self.sid, self.file_store) await self.controller.close() if self.runtime is not None: - self.runtime.close() if self.security_analyzer is not None: await self.security_analyzer.close() diff --git a/openhands/server/session/manager.py b/openhands/server/session/manager.py index a3e490916466..b0cc9c4b6eaf 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -28,85 +28,98 @@ class SessionManager: _last_alive_timestamps: dict[str, float] = field(default_factory=dict) _redis_listen_task: asyncio.Task | None = None _session_is_running_flags: dict[str, asyncio.Event] = field(default_factory=dict) - _has_remote_connections_flags: dict[str, asyncio.Event] = field(default_factory=dict) + _has_remote_connections_flags: dict[str, asyncio.Event] = field( + default_factory=dict + ) async def __aenter__(self): redis_client = self._get_redis_client() if redis_client: self._redis_listen_task = asyncio.create_task(self._redis_subscribe()) return self - + async def __aexit__(self, exc_type, exc_value, traceback): if self._redis_listen_task: self._redis_listen_task.cancel() self._redis_listen_task = None def _get_redis_client(self): - redis_client = getattr(self.sio.manager, "redis", None) + redis_client = getattr(self.sio.manager, 
'redis', None) return redis_client - + async def _redis_subscribe(self): """ We use a redis backchannel to send actions between server nodes """ redis_client = self._get_redis_client() pubsub = redis_client.pubsub() - await pubsub.subscribe("oh_event") + await pubsub.subscribe('oh_event') while should_continue(): try: - message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=5) + message = await pubsub.get_message( + ignore_subscribe_messages=True, timeout=5 + ) if message: - await self._process_message(message) + await self._process_message(message) except asyncio.CancelledError: return except: try: asyncio.get_running_loop() - logger.warning("error_reading_from_redis", exc_info=True, stack_info=True) + logger.warning( + 'error_reading_from_redis', exc_info=True, stack_info=True + ) except RuntimeError: return # Loop has been shut down async def _process_message(self, message: dict): data = json.loads(message['data']) - logger.info(f"got_published_message:{message}") - sid = data["sid"] - message_type = data["message_type"] - if message_type == "event": + logger.info(f'got_published_message:{message}') + sid = data['sid'] + message_type = data['message_type'] + if message_type == 'event': session = self.local_sessions_by_sid.get(sid) if session: - await session.dispatch(data["data"]) - elif message_type == "is_session_running": + await session.dispatch(data['data']) + elif message_type == 'is_session_running': # Another node in the cluster is asking if the current node is running the session given. 
session = self.local_sessions_by_sid.get(sid) if session: - await self._get_redis_client().publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "session_is_running" - })) - elif message_type == "session_is_running": + await self._get_redis_client().publish( + 'oh_event', + json.dumps({'sid': sid, 'message_type': 'session_is_running'}), + ) + elif message_type == 'session_is_running': self._last_alive_timestamps[sid] = time.time() flag = self._session_is_running_flags.get(sid) if flag: flag.set() - elif message_type == "has_remote_connections_query": + elif message_type == 'has_remote_connections_query': # Another node in the cluster is asking if the current node is connected to a session required = sid in self.local_connection_id_to_session_id.values() if required: - await self._get_redis_client().publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "has_remote_connections_response" - })) - elif message_type == "has_remote_connections_response": + await self._get_redis_client().publish( + 'oh_event', + json.dumps( + {'sid': sid, 'message_type': 'has_remote_connections_response'} + ), + ) + elif message_type == 'has_remote_connections_response': flag = self._has_remote_connections_flags.get(sid) if flag: flag.set() - elif message_type == "session_closing": - # Session closing event - We only get this in the event of graceful shutdown, + elif message_type == 'session_closing': + # Session closing event - We only get this in the event of graceful shutdown, # which can't be guaranteed - nodes can simply vanish unexpectedly! 
- logger.info(f"session_closing:{sid}") - for connection_id, local_sid in self.local_connection_id_to_session_id.items(): + logger.info(f'session_closing:{sid}') + for ( + connection_id, + local_sid, + ) in self.local_connection_id_to_session_id.items(): if sid == local_sid: - logger.warning('local_connection_to_closing_session:{connection_id}:{sid}') + logger.warning( + 'local_connection_to_closing_session:{connection_id}:{sid}' + ) await self.sio.disconnect(connection_id) async def attach_to_conversation(self, sid: str) -> Conversation | None: @@ -131,30 +144,35 @@ async def detach_from_conversation(self, conversation: Conversation): async def init_or_join_session(self, sid: str, connection_id: str, data: dict): await self.sio.enter_room(connection_id, ROOM_KEY.format(sid=sid)) self.local_connection_id_to_session_id[connection_id] = sid - + # If we have a local session running, use that session = self.local_sessions_by_sid.get(sid) if session: logger.info(f'found_local_session:{sid}') - return session.agent_session.event_stream + return session.agent_session.event_stream # If there is a remote session running, retrieve existing events for that redis_client = self._get_redis_client() if redis_client and await self._is_session_running_in_cluster(sid): - return EventStream(sid, self.file_store) - + return EventStream(sid, self.file_store) + return await self.start_local_session(sid, data) async def _is_session_running_in_cluster(self, sid: str) -> bool: - """ As the rest of the cluster if a session is running. Wait a for a short timeout for a reply """ + """As the rest of the cluster if a session is running. 
Wait a for a short timeout for a reply""" # Create a flag for the callback flag = asyncio.Event() self._session_is_running_flags[sid] = flag try: - await self._get_redis_client().publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "is_session_running", - })) + await self._get_redis_client().publish( + 'oh_event', + json.dumps( + { + 'sid': sid, + 'message_type': 'is_session_running', + } + ), + ) async with asyncio.timeout(_REDIS_POLL_TIMEOUT): await flag.wait() @@ -167,15 +185,20 @@ async def _is_session_running_in_cluster(self, sid: str) -> bool: self._session_is_running_flags.pop(sid) async def _has_remote_connections(self, sid: str) -> bool: - """ As the rest of the cluster if they still want this session running. Wait a for a short timeout for a reply """ + """As the rest of the cluster if they still want this session running. Wait a for a short timeout for a reply""" # Create a flag for the callback flag = asyncio.Event() self._has_remote_connections_flags[sid] = flag try: - await self._get_redis_client().publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "has_remote_connections_query", - })) + await self._get_redis_client().publish( + 'oh_event', + json.dumps( + { + 'sid': sid, + 'message_type': 'has_remote_connections_query', + } + ), + ) async with asyncio.timeout(_REDIS_POLL_TIMEOUT): await flag.wait() @@ -196,7 +219,7 @@ async def start_local_session(self, sid: str, data: dict): self.local_sessions_by_sid[sid] = session await session.initialize_agent(data) return session.agent_session.event_stream - + async def send_to_event_stream(self, connection_id: str, data: dict): # If there is a local session running, send to that sid = self.local_connection_id_to_session_id.get(connection_id) @@ -207,23 +230,30 @@ async def send_to_event_stream(self, connection_id: str, data: dict): if session: await session.dispatch(data) return - + redis_client = self._get_redis_client() if redis_client: # If we have a recent report that the session 
is alive in another pod last_alive_at = self._last_alive_timestamps.get(sid) or 0 next_alive_check = last_alive_at + _CHECK_ALIVE_INTERVAL - if next_alive_check > time.time() or self._is_session_running_in_cluster(sid): + if next_alive_check > time.time() or self._is_session_running_in_cluster( + sid + ): # Send the event to the other pod - await redis_client.publish("oh_event", json.dumps({ - "sid": sid, - "message_type": "event", - "data": data, - })) + await redis_client.publish( + 'oh_event', + json.dumps( + { + 'sid': sid, + 'message_type': 'event', + 'data': data, + } + ), + ) return - + raise RuntimeError(f'no_connected_session:{connection_id}:{sid}') - + async def disconnect_from_session(self, connection_id: str): sid = self.local_connection_id_to_session_id.pop(connection_id, None) if not sid: @@ -238,7 +268,7 @@ async def disconnect_from_session(self, connection_id: str): asyncio.create_task(self._cleanup_session_later(session)) else: await self._close_session(session) - + async def _cleanup_session_later(self, session: Session): # Once there have been no connections to a session for a reasonable period, we close it try: @@ -246,13 +276,17 @@ async def _cleanup_session_later(self, session: Session): finally: # If the sleep was cancelled, we still want to close these await self._cleanup_session(session) - + async def _cleanup_session(self, session: Session): # Get local connections - has_local_connections = next(( - True for v in self.local_connection_id_to_session_id.values() - if v == session.sid - ), False) + has_local_connections = next( + ( + True + for v in self.local_connection_id_to_session_id.values() + if v == session.sid + ), + False, + ) if has_local_connections: return False @@ -263,10 +297,10 @@ async def _cleanup_session(self, session: Session): # We alert the cluster in case they are interested if redis_client: - await redis_client.publish("oh_event", json.dumps({ - "sid": session.sid, - "message_type": "session_closing" - })) + await 
redis_client.publish( + 'oh_event', + json.dumps({'sid': session.sid, 'message_type': 'session_closing'}), + ) await self._close_session(session) @@ -275,20 +309,21 @@ async def _close_session(self, session: Session): # Clear up local variables connection_ids_to_remove = list( - connection_id for connection_id, sid in self.local_connection_id_to_session_id.items() + connection_id + for connection_id, sid in self.local_connection_id_to_session_id.items() if sid == session.sid ) for connnnection_id in connection_ids_to_remove: self.local_connection_id_to_session_id.pop(connnnection_id, None) - + self.local_sessions_by_sid.pop(session.sid, None) # We alert the cluster in case they are interested redis_client = self._get_redis_client() if redis_client: - await redis_client.publish("oh_event", json.dumps({ - "sid": session.sid, - "message_type": "session_closing" - })) + await redis_client.publish( + 'oh_event', + json.dumps({'sid': session.sid, 'message_type': 'session_closing'}), + ) session.close() diff --git a/openhands/server/session/session.py b/openhands/server/session/session.py index 8fb98dabac71..e26d237a8764 100644 --- a/openhands/server/session/session.py +++ b/openhands/server/session/session.py @@ -25,7 +25,7 @@ from openhands.storage.files import FileStore from openhands.utils.async_utils import call_coro_in_bg_thread -ROOM_KEY = "room:{sid}" +ROOM_KEY = 'room:{sid}' class Session: @@ -39,7 +39,11 @@ class Session: settings: dict | None def __init__( - self, sid: str, config: AppConfig, file_store: FileStore, sio: socketio.AsyncServer | None + self, + sid: str, + config: AppConfig, + file_store: FileStore, + sio: socketio.AsyncServer | None, ): self.sid = sid self.sio = sio @@ -169,12 +173,12 @@ async def _send(self, data: dict[str, object]) -> bool: if not self.is_alive: return False if self.sio: - await self.sio.emit("oh_event", data, to=ROOM_KEY.format(sid=self.sid)) + await self.sio.emit('oh_event', data, to=ROOM_KEY.format(sid=self.sid)) await 
asyncio.sleep(0.001) # This flushes the data to the client self.last_active_ts = int(time.time()) return True except RuntimeError: - logger.error("Error sending", stack_info=True, exc_info=True) + logger.error('Error sending', stack_info=True, exc_info=True) self.is_alive = False return False diff --git a/tests/unit/test_manager.py b/tests/unit/test_manager.py index d47044ba4e46..ab8122905185 100644 --- a/tests/unit/test_manager.py +++ b/tests/unit/test_manager.py @@ -1,5 +1,3 @@ - - import asyncio import json import pytest @@ -18,7 +16,7 @@ class GetMessageMock: async def get_message(self, **kwargs): await asyncio.sleep(self.sleep_time) - return {"data":json.dumps(self.message)} + return {'data': json.dumps(self.message)} def get_mock_sio(get_message: GetMessageMock | None = None): @@ -36,29 +34,43 @@ def get_mock_sio(get_message: GetMessageMock | None = None): async def test_session_not_running_in_cluster(): sio = get_mock_sio() with ( - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), - ): - async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: - result = await session_manager._is_session_running_in_cluster("non-existant-session") + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.01), + ): + async with SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: + result = await session_manager._is_session_running_in_cluster( + 'non-existant-session' + ) assert result is False assert sio.manager.redis.publish.await_count == 1 sio.manager.redis.publish.assert_called_once_with( - 'oh_event', '{"sid": "non-existant-session", "message_type": "is_session_running"}' + 'oh_event', + '{"sid": "non-existant-session", "message_type": "is_session_running"}', ) @pytest.mark.asyncio async def test_session_is_running_in_cluster(): - sio = get_mock_sio(GetMessageMock({"sid": "existing-session", "message_type": "session_is_running"})) + sio = get_mock_sio( + GetMessageMock( + {'sid': 
'existing-session', 'message_type': 'session_is_running'} + ) + ) with ( - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.02), - ): - async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: - result = await session_manager._is_session_running_in_cluster("existing-session") + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.02), + ): + async with SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: + result = await session_manager._is_session_running_in_cluster( + 'existing-session' + ) assert result is True assert sio.manager.redis.publish.await_count == 1 sio.manager.redis.publish.assert_called_once_with( - 'oh_event', '{"sid": "existing-session", "message_type": "is_session_running"}' + 'oh_event', + '{"sid": "existing-session", "message_type": "is_session_running"}', ) @@ -72,13 +84,23 @@ async def test_init_new_local_session(): is_session_running_in_cluster_mock = AsyncMock() is_session_running_in_cluster_mock.return_value = False with ( - patch("openhands.server.session.manager.Session", mock_session), - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), - patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), - patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), - ): - async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: - await session_manager.init_or_join_session("new-session-id", "new-session-id", {"type": "mock-settings"}) + patch('openhands.server.session.manager.Session', mock_session), + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.01), + patch( + 'openhands.server.session.manager.SessionManager._redis_subscribe', + AsyncMock(), + ), + patch( + 'openhands.server.session.manager.SessionManager._is_session_running_in_cluster', + is_session_running_in_cluster_mock, + ), + ): + async with 
SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: + await session_manager.init_or_join_session( + 'new-session-id', 'new-session-id', {'type': 'mock-settings'} + ) assert session_instance.initialize_agent.call_count == 1 assert sio.enter_room.await_count == 1 @@ -93,16 +115,28 @@ async def test_join_local_session(): is_session_running_in_cluster_mock = AsyncMock() is_session_running_in_cluster_mock.return_value = False with ( - patch("openhands.server.session.manager.Session", mock_session), - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), - patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), - patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), - ): - async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + patch('openhands.server.session.manager.Session', mock_session), + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.01), + patch( + 'openhands.server.session.manager.SessionManager._redis_subscribe', + AsyncMock(), + ), + patch( + 'openhands.server.session.manager.SessionManager._is_session_running_in_cluster', + is_session_running_in_cluster_mock, + ), + ): + async with SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: # First call initializes - await session_manager.init_or_join_session("new-session-id", "new-session-id", {"type": "mock-settings"}) + await session_manager.init_or_join_session( + 'new-session-id', 'new-session-id', {'type': 'mock-settings'} + ) # Second call joins - await session_manager.init_or_join_session("new-session-id", "extra-connection-id", {"type": "mock-settings"}) + await session_manager.init_or_join_session( + 'new-session-id', 'extra-connection-id', {'type': 'mock-settings'} + ) assert session_instance.initialize_agent.call_count == 1 assert sio.enter_room.await_count == 2 @@ -117,14 +151,24 @@ 
async def test_join_cluster_session(): is_session_running_in_cluster_mock = AsyncMock() is_session_running_in_cluster_mock.return_value = True with ( - patch("openhands.server.session.manager.Session", mock_session), - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), - patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), - patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), - ): - async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: + patch('openhands.server.session.manager.Session', mock_session), + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.01), + patch( + 'openhands.server.session.manager.SessionManager._redis_subscribe', + AsyncMock(), + ), + patch( + 'openhands.server.session.manager.SessionManager._is_session_running_in_cluster', + is_session_running_in_cluster_mock, + ), + ): + async with SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: # First call initializes - await session_manager.init_or_join_session("new-session-id", "new-session-id", {"type": "mock-settings"}) + await session_manager.init_or_join_session( + 'new-session-id', 'new-session-id', {'type': 'mock-settings'} + ) assert session_instance.initialize_agent.call_count == 0 assert sio.enter_room.await_count == 1 @@ -139,15 +183,27 @@ async def test_add_to_local_event_stream(): is_session_running_in_cluster_mock = AsyncMock() is_session_running_in_cluster_mock.return_value = False with ( - patch("openhands.server.session.manager.Session", mock_session), - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), - patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), - patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), - ): - async with SessionManager(sio, AppConfig(), 
InMemoryFileStore()) as session_manager: - await session_manager.init_or_join_session("new-session-id", "connection-id", {"type": "mock-settings"}) - await session_manager.send_to_event_stream("connection-id", {"event_type": "some_event"}) - session_instance.dispatch.assert_called_once_with({"event_type": "some_event"}) + patch('openhands.server.session.manager.Session', mock_session), + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.01), + patch( + 'openhands.server.session.manager.SessionManager._redis_subscribe', + AsyncMock(), + ), + patch( + 'openhands.server.session.manager.SessionManager._is_session_running_in_cluster', + is_session_running_in_cluster_mock, + ), + ): + async with SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: + await session_manager.init_or_join_session( + 'new-session-id', 'connection-id', {'type': 'mock-settings'} + ) + await session_manager.send_to_event_stream( + 'connection-id', {'event_type': 'some_event'} + ) + session_instance.dispatch.assert_called_once_with({'event_type': 'some_event'}) @pytest.mark.asyncio @@ -160,15 +216,28 @@ async def test_add_to_cluster_event_stream(): is_session_running_in_cluster_mock = AsyncMock() is_session_running_in_cluster_mock.return_value = True with ( - patch("openhands.server.session.manager.Session", mock_session), - patch("openhands.server.session.manager._REDIS_POLL_TIMEOUT", 0.01), - patch("openhands.server.session.manager.SessionManager._redis_subscribe", AsyncMock()), - patch("openhands.server.session.manager.SessionManager._is_session_running_in_cluster", is_session_running_in_cluster_mock), - ): - async with SessionManager(sio, AppConfig(), InMemoryFileStore()) as session_manager: - await session_manager.init_or_join_session("new-session-id", "connection-id", {"type": "mock-settings"}) - await session_manager.send_to_event_stream("connection-id", {"event_type": "some_event"}) + patch('openhands.server.session.manager.Session', mock_session), 
+ patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.01), + patch( + 'openhands.server.session.manager.SessionManager._redis_subscribe', + AsyncMock(), + ), + patch( + 'openhands.server.session.manager.SessionManager._is_session_running_in_cluster', + is_session_running_in_cluster_mock, + ), + ): + async with SessionManager( + sio, AppConfig(), InMemoryFileStore() + ) as session_manager: + await session_manager.init_or_join_session( + 'new-session-id', 'connection-id', {'type': 'mock-settings'} + ) + await session_manager.send_to_event_stream( + 'connection-id', {'event_type': 'some_event'} + ) assert sio.manager.redis.publish.await_count == 1 sio.manager.redis.publish.assert_called_once_with( - 'oh_event', '{"sid": "new-session-id", "message_type": "event", "data": {"event_type": "some_event"}}' + 'oh_event', + '{"sid": "new-session-id", "message_type": "event", "data": {"event_type": "some_event"}}', ) From 5f575f9b7b8a24f6224d29b29e5c72fc2962ea42 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 09:35:19 -0700 Subject: [PATCH 78/91] Reorder imports --- tests/unit/test_manager.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_manager.py b/tests/unit/test_manager.py index ab8122905185..4ede3e9bdc36 100644 --- a/tests/unit/test_manager.py +++ b/tests/unit/test_manager.py @@ -1,9 +1,10 @@ import asyncio import json -import pytest from dataclasses import dataclass from unittest.mock import AsyncMock, MagicMock, patch +import pytest + from openhands.core.config.app_config import AppConfig from openhands.server.session.manager import SessionManager from openhands.storage.memory import InMemoryFileStore From ffb443ba95cd6c997b0a26ddddf915f4c433601c Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 09:59:23 -0700 Subject: [PATCH 79/91] Cleanup fix --- openhands/server/session/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/server/session/manager.py 
b/openhands/server/session/manager.py index b0cc9c4b6eaf..0f61f513c123 100644 --- a/openhands/server/session/manager.py +++ b/openhands/server/session/manager.py @@ -292,7 +292,7 @@ async def _cleanup_session(self, session: Session): # If no local connections, get connections through redis redis_client = self._get_redis_client() - if redis_client and self._has_remote_connections(session.sid): + if redis_client and await self._has_remote_connections(session.sid): return False # We alert the cluster in case they are interested From f8b19cf6868938d9f8a66f9317e570e530fa0c64 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 10:36:38 -0700 Subject: [PATCH 80/91] More logging on sigterm --- openhands/utils/shutdown_listener.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openhands/utils/shutdown_listener.py b/openhands/utils/shutdown_listener.py index 3aedd2672270..1d29c1a53775 100644 --- a/openhands/utils/shutdown_listener.py +++ b/openhands/utils/shutdown_listener.py @@ -10,6 +10,8 @@ from uvicorn.server import HANDLED_SIGNALS +from openhands.core.logger import openhands_logger as logger + _should_exit = None @@ -17,6 +19,7 @@ def _register_signal_handler(sig: signal.Signals): original_handler = None def handler(sig_: int, frame: FrameType | None): + logger.info(f"shutdown_signal:{sig_}") global _should_exit _should_exit = True if original_handler: @@ -31,10 +34,15 @@ def _register_signal_handlers(): return _should_exit = False + logger.info("_register_signal_handlers") + # Check if we're in the main thread of the main interpreter if threading.current_thread() is threading.main_thread(): + logger.info("_register_signal_handlers:main_thread") for sig in HANDLED_SIGNALS: _register_signal_handler(sig) + else: + logger.info("_register_signal_handlers:not_main_thread") def should_exit() -> bool: From 9b7af2b339fc1a25e23b90f8d67fe8e88471d0d9 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 11:10:13 -0700 Subject: [PATCH 81/91] 
Error handling and lint fixes --- .../runtime/impl/remote/remote_runtime.py | 273 +++++++++--------- openhands/utils/shutdown_listener.py | 8 +- 2 files changed, 143 insertions(+), 138 deletions(-) diff --git a/openhands/runtime/impl/remote/remote_runtime.py b/openhands/runtime/impl/remote/remote_runtime.py index cca97392f5a8..8db7510ba848 100644 --- a/openhands/runtime/impl/remote/remote_runtime.py +++ b/openhands/runtime/impl/remote/remote_runtime.py @@ -10,6 +10,7 @@ import tenacity from openhands.core.config import AppConfig +from openhands.core.logger import openhands_logger as logger from openhands.events import EventStream from openhands.events.action import ( BrowseInteractiveAction, @@ -33,6 +34,7 @@ RuntimeDisconnectedError, RuntimeNotFoundError, RuntimeNotReadyError, + RuntimeUnavailableError, ) from openhands.runtime.builder.remote import RemoteRuntimeBuilder from openhands.runtime.plugins import PluginRequirement @@ -54,7 +56,7 @@ def __init__( self, config: AppConfig, event_stream: EventStream, - sid: str = 'default', + sid: str = "default", plugins: list[PluginRequirement] | None = None, env_vars: dict[str, str] | None = None, status_callback: Optional[Callable] = None, @@ -77,15 +79,15 @@ def __init__( ) if self.config.sandbox.api_key is None: raise ValueError( - 'API key is required to use the remote runtime. ' - 'Please set the API key in the config (config.toml) or as an environment variable (SANDBOX_API_KEY).' + "API key is required to use the remote runtime. " + "Please set the API key in the config (config.toml) or as an environment variable (SANDBOX_API_KEY)." 
) - self.session.headers.update({'X-API-Key': self.config.sandbox.api_key}) + self.session.headers.update({"X-API-Key": self.config.sandbox.api_key}) if self.config.workspace_base is not None: self.log( - 'debug', - 'Setting workspace_base is not supported in the remote runtime.', + "debug", + "Setting workspace_base is not supported in the remote runtime.", ) self.runtime_builder = RemoteRuntimeBuilder( @@ -100,7 +102,7 @@ async def connect(self): try: await call_sync_from_async(self._start_or_attach_to_runtime) except RuntimeNotReadyError: - self.log('error', 'Runtime failed to start, timed out before ready') + self.log("error", "Runtime failed to start, timed out before ready") raise await call_sync_from_async(self.setup_initial_env) self._runtime_initialized = True @@ -108,93 +110,93 @@ async def connect(self): def _start_or_attach_to_runtime(self): existing_runtime = self._check_existing_runtime() if existing_runtime: - self.log('debug', f'Using existing runtime with ID: {self.runtime_id}') + self.log("debug", f"Using existing runtime with ID: {self.runtime_id}") elif self.attach_to_existing: raise RuntimeNotFoundError( - f'Could not find existing runtime for SID: {self.sid}' + f"Could not find existing runtime for SID: {self.sid}" ) else: - self.send_status_message('STATUS$STARTING_CONTAINER') + self.send_status_message("STATUS$STARTING_CONTAINER") if self.config.sandbox.runtime_container_image is None: self.log( - 'info', - f'Building remote runtime with base image: {self.config.sandbox.base_container_image}', + "info", + f"Building remote runtime with base image: {self.config.sandbox.base_container_image}", ) self._build_runtime() else: self.log( - 'info', - f'Starting remote runtime with image: {self.config.sandbox.runtime_container_image}', + "info", + f"Starting remote runtime with image: {self.config.sandbox.runtime_container_image}", ) self.container_image = self.config.sandbox.runtime_container_image self._start_runtime() assert ( self.runtime_id is 
not None - ), 'Runtime ID is not set. This should never happen.' + ), "Runtime ID is not set. This should never happen." assert ( self.runtime_url is not None - ), 'Runtime URL is not set. This should never happen.' - self.send_status_message('STATUS$WAITING_FOR_CLIENT') + ), "Runtime URL is not set. This should never happen." + self.send_status_message("STATUS$WAITING_FOR_CLIENT") if not self.attach_to_existing: - self.log('info', 'Waiting for runtime to be alive...') + self.log("info", "Waiting for runtime to be alive...") self._wait_until_alive() if not self.attach_to_existing: - self.log('info', 'Runtime is ready.') - self.send_status_message(' ') + self.log("info", "Runtime is ready.") + self.send_status_message(" ") def _check_existing_runtime(self) -> bool: try: with self._send_request( - 'GET', - f'{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}', + "GET", + f"{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}", is_retry=False, timeout=5, ) as response: data = response.json() - status = data.get('status') - if status == 'running' or status == 'paused': + status = data.get("status") + if status == "running" or status == "paused": self._parse_runtime_response(response) except requests.HTTPError as e: if e.response.status_code == 404: return False - self.log('debug', f'Error while looking for remote runtime: {e}') + self.log("debug", f"Error while looking for remote runtime: {e}") raise - if status == 'running': + if status == "running": return True - elif status == 'stopped': - self.log('debug', 'Found existing remote runtime, but it is stopped') + elif status == "stopped": + self.log("debug", "Found existing remote runtime, but it is stopped") return False - elif status == 'paused': - self.log('debug', 'Found existing remote runtime, but it is paused') + elif status == "paused": + self.log("debug", "Found existing remote runtime, but it is paused") self._resume_runtime() return True else: - self.log('error', f'Invalid response 
from runtime API: {data}') + self.log("error", f"Invalid response from runtime API: {data}") return False def _build_runtime(self): - self.log('debug', f'Building RemoteRuntime config:\n{self.config}') + self.log("debug", f"Building RemoteRuntime config:\n{self.config}") with self._send_request( - 'GET', - f'{self.config.sandbox.remote_runtime_api_url}/registry_prefix', + "GET", + f"{self.config.sandbox.remote_runtime_api_url}/registry_prefix", is_retry=False, timeout=10, ) as response: response_json = response.json() - registry_prefix = response_json['registry_prefix'] - os.environ['OH_RUNTIME_RUNTIME_IMAGE_REPO'] = ( - registry_prefix.rstrip('/') + '/runtime' + registry_prefix = response_json["registry_prefix"] + os.environ["OH_RUNTIME_RUNTIME_IMAGE_REPO"] = ( + registry_prefix.rstrip("/") + "/runtime" ) self.log( - 'debug', + "debug", f'Runtime image repo: {os.environ["OH_RUNTIME_RUNTIME_IMAGE_REPO"]}', ) if self.config.sandbox.runtime_extra_deps: self.log( - 'debug', - f'Installing extra user-provided dependencies in the runtime image: {self.config.sandbox.runtime_extra_deps}', + "debug", + f"Installing extra user-provided dependencies in the runtime image: {self.config.sandbox.runtime_extra_deps}", ) # Build the container image @@ -207,93 +209,96 @@ def _build_runtime(self): ) with self._send_request( - 'GET', - f'{self.config.sandbox.remote_runtime_api_url}/image_exists', + "GET", + f"{self.config.sandbox.remote_runtime_api_url}/image_exists", is_retry=False, - params={'image': self.container_image}, + params={"image": self.container_image}, timeout=10, ) as response: - if not response.json()['exists']: + if not response.json()["exists"]: raise RuntimeError( - f'Container image {self.container_image} does not exist' + f"Container image {self.container_image} does not exist" ) def _start_runtime(self): # Prepare the request body for the /start endpoint plugin_args = [] if self.plugins is not None and len(self.plugins) > 0: - plugin_args = ['--plugins'] + 
[plugin.name for plugin in self.plugins] + plugin_args = ["--plugins"] + [plugin.name for plugin in self.plugins] browsergym_args = [] if self.config.sandbox.browsergym_eval_env is not None: browsergym_args = [ - '--browsergym-eval-env' - ] + self.config.sandbox.browsergym_eval_env.split(' ') + "--browsergym-eval-env" + ] + self.config.sandbox.browsergym_eval_env.split(" ") command = get_remote_startup_command( self.port, self.config.workspace_mount_path_in_sandbox, - 'openhands' if self.config.run_as_openhands else 'root', + "openhands" if self.config.run_as_openhands else "root", self.config.sandbox.user_id, plugin_args, browsergym_args, is_root=not self.config.run_as_openhands, # is_root=True when running as root ) start_request = { - 'image': self.container_image, - 'command': command, - 'working_dir': '/openhands/code/', - 'environment': {'DEBUG': 'true'} if self.config.debug else {}, - 'session_id': self.sid, + "image": self.container_image, + "command": command, + "working_dir": "/openhands/code/", + "environment": {"DEBUG": "true"} if self.config.debug else {}, + "session_id": self.sid, } # Start the sandbox using the /start endpoint - with self._send_request( - 'POST', - f'{self.config.sandbox.remote_runtime_api_url}/start', - is_retry=False, - json=start_request, - ) as response: - self._parse_runtime_response(response) - self.log( - 'debug', - f'Runtime started. URL: {self.runtime_url}', - ) + try: + with self._send_request( + "POST", + f"{self.config.sandbox.remote_runtime_api_url}/start", + is_retry=False, + json=start_request, + ) as response: + self._parse_runtime_response(response) + self.log( + "debug", + f"Runtime started. 
URL: {self.runtime_url}", + ) + except requests.HTTPError as e: + raise RuntimeUnavailableError() from e def _resume_runtime(self): with self._send_request( - 'POST', - f'{self.config.sandbox.remote_runtime_api_url}/resume', + "POST", + f"{self.config.sandbox.remote_runtime_api_url}/resume", is_retry=False, - json={'runtime_id': self.runtime_id}, + json={"runtime_id": self.runtime_id}, timeout=30, ): pass - self.log('debug', 'Runtime resumed.') + self.log("debug", "Runtime resumed.") def _parse_runtime_response(self, response: requests.Response): start_response = response.json() - self.runtime_id = start_response['runtime_id'] - self.runtime_url = start_response['url'] - if 'session_api_key' in start_response: + self.runtime_id = start_response["runtime_id"] + self.runtime_url = start_response["url"] + if "session_api_key" in start_response: self.session.headers.update( - {'X-Session-API-Key': start_response['session_api_key']} + {"X-Session-API-Key": start_response["session_api_key"]} ) @property def vscode_url(self) -> str | None: if self.vscode_enabled and self._runtime_initialized: if ( - hasattr(self, '_vscode_url') and self._vscode_url is not None + hasattr(self, "_vscode_url") and self._vscode_url is not None ): # cached value return self._vscode_url with self._send_request( - 'GET', - f'{self.runtime_url}/vscode/connection_token', + "GET", + f"{self.runtime_url}/vscode/connection_token", timeout=10, ) as response: response_json = response.json() assert isinstance(response_json, dict) - if response_json['token'] is None: + if response_json["token"] is None: return None # parse runtime_url to get vscode_url _parsed_url = urlparse(self.runtime_url) @@ -302,8 +307,8 @@ def vscode_url(self) -> str | None: ) self._vscode_url = f'{_parsed_url.scheme}://vscode-{_parsed_url.netloc}/?tkn={response_json["token"]}&folder={self.config.workspace_mount_path_in_sandbox}' self.log( - 'debug', - f'VSCode URL: {self._vscode_url}', + "debug", + f"VSCode URL: 
{self._vscode_url}", ) return self._vscode_url else: @@ -322,57 +327,57 @@ def _wait_until_alive(self): return retry_decorator(self._wait_until_alive_impl)() def _wait_until_alive_impl(self): - self.log('debug', f'Waiting for runtime to be alive at url: {self.runtime_url}') + self.log("debug", f"Waiting for runtime to be alive at url: {self.runtime_url}") with self._send_request( - 'GET', - f'{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}', + "GET", + f"{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}", ) as runtime_info_response: runtime_data = runtime_info_response.json() - assert 'runtime_id' in runtime_data - assert runtime_data['runtime_id'] == self.runtime_id - assert 'pod_status' in runtime_data - pod_status = runtime_data['pod_status'] - self.log('debug', f'Pod status: {pod_status}') + assert "runtime_id" in runtime_data + assert runtime_data["runtime_id"] == self.runtime_id + assert "pod_status" in runtime_data + pod_status = runtime_data["pod_status"] + self.log("debug", f"Pod status: {pod_status}") # FIXME: We should fix it at the backend of /start endpoint, make sure # the pod is created before returning the response. # Retry a period of time to give the cluster time to start the pod - if pod_status == 'Ready': + if pod_status == "Ready": try: with self._send_request( - 'GET', - f'{self.runtime_url}/alive', + "GET", + f"{self.runtime_url}/alive", ): # will raise exception if we don't get 200 back. 
pass except requests.HTTPError as e: self.log( - 'warning', f"Runtime /alive failed, but pod says it's ready: {e}" + "warning", f"Runtime /alive failed, but pod says it's ready: {e}" ) raise RuntimeNotReadyError( - f'Runtime /alive failed to respond with 200: {e}' + f"Runtime /alive failed to respond with 200: {e}" ) return elif ( - pod_status == 'Not Found' - or pod_status == 'Pending' - or pod_status == 'Running' + pod_status == "Not Found" + or pod_status == "Pending" + or pod_status == "Running" ): # nb: Running is not yet Ready raise RuntimeNotReadyError( - f'Runtime (ID={self.runtime_id}) is not yet ready. Status: {pod_status}' + f"Runtime (ID={self.runtime_id}) is not yet ready. Status: {pod_status}" ) - elif pod_status in ('Failed', 'Unknown'): + elif pod_status in ("Failed", "Unknown"): # clean up the runtime self.close() raise RuntimeError( - f'Runtime (ID={self.runtime_id}) failed to start. Current status: {pod_status}' + f"Runtime (ID={self.runtime_id}) failed to start. Current status: {pod_status}" ) else: # Maybe this should be a hard failure, but passing through in case the API changes - self.log('warning', f'Unknown pod status: {pod_status}') + self.log("warning", f"Unknown pod status: {pod_status}") self.log( - 'debug', - f'Waiting for runtime pod to be active. Current status: {pod_status}', + "debug", + f"Waiting for runtime pod to be active. 
Current status: {pod_status}", ) raise RuntimeNotReadyError() @@ -383,13 +388,13 @@ def close(self, timeout: int = 10): if self.runtime_id and self.session: try: with self._send_request( - 'POST', - f'{self.config.sandbox.remote_runtime_api_url}/stop', + "POST", + f"{self.config.sandbox.remote_runtime_api_url}/stop", is_retry=False, - json={'runtime_id': self.runtime_id}, + json={"runtime_id": self.runtime_id}, timeout=timeout, ): - self.log('debug', 'Runtime stopped.') + self.log("debug", "Runtime stopped.") except Exception as e: raise e finally: @@ -402,24 +407,24 @@ def run_action(self, action: Action, is_retry: bool = False) -> Observation: return self.edit(action) with self.action_semaphore: if not action.runnable: - return NullObservation('') + return NullObservation("") action_type = action.action # type: ignore[attr-defined] if action_type not in ACTION_TYPE_TO_CLASS: - raise ValueError(f'Action {action_type} does not exist.') + raise ValueError(f"Action {action_type} does not exist.") if not hasattr(self, action_type): return ErrorObservation( - f'[Runtime (ID={self.runtime_id})] Action {action_type} is not supported in the current runtime.', - error_id='AGENT_ERROR$BAD_ACTION', + f"[Runtime (ID={self.runtime_id})] Action {action_type} is not supported in the current runtime.", + error_id="AGENT_ERROR$BAD_ACTION", ) assert action.timeout is not None try: - request_body = {'action': event_to_dict(action)} - self.log('debug', f'Request body: {request_body}') + request_body = {"action": event_to_dict(action)} + self.log("debug", f"Request body: {request_body}") with self._send_request( - 'POST', - f'{self.runtime_url}/execute_action', + "POST", + f"{self.runtime_url}/execute_action", is_retry=False, json=request_body, # wait a few more seconds to get the timeout error from client side @@ -430,7 +435,7 @@ def run_action(self, action: Action, is_retry: bool = False) -> Observation: obs._cause = action.id # type: ignore[attr-defined] except requests.Timeout: 
raise RuntimeError( - f'Runtime failed to return execute_action before the requested timeout of {action.timeout}s' + f"Runtime failed to return execute_action before the requested timeout of {action.timeout}s" ) return obs @@ -439,16 +444,16 @@ def _send_request(self, method, url, is_retry=False, **kwargs): try: return send_request(self.session, method, url, **kwargs) except requests.Timeout: - self.log('error', 'No response received within the timeout period.') + self.log("error", "No response received within the timeout period.") raise except requests.HTTPError as e: if is_runtime_request and e.response.status_code == 404: raise RuntimeDisconnectedError( - f'404 error while connecting to {self.runtime_url}' + f"404 error while connecting to {self.runtime_url}" ) elif is_runtime_request and e.response.status_code == 503: if not is_retry: - self.log('warning', 'Runtime appears to be paused. Resuming...') + self.log("warning", "Runtime appears to be paused. Resuming...") self._resume_runtime() self._wait_until_alive() return self._send_request(method, url, True, **kwargs) @@ -480,16 +485,16 @@ def copy_to( self, host_src: str, sandbox_dest: str, recursive: bool = False ) -> None: if not os.path.exists(host_src): - raise FileNotFoundError(f'Source file {host_src} does not exist') + raise FileNotFoundError(f"Source file {host_src} does not exist") try: if recursive: with tempfile.NamedTemporaryFile( - suffix='.zip', delete=False + suffix=".zip", delete=False ) as temp_zip: temp_zip_path = temp_zip.name - with ZipFile(temp_zip_path, 'w') as zipf: + with ZipFile(temp_zip_path, "w") as zipf: for root, _, files in os.walk(host_src): for file in files: file_path = os.path.join(root, file) @@ -498,39 +503,39 @@ def copy_to( ) zipf.write(file_path, arcname) - upload_data = {'file': open(temp_zip_path, 'rb')} + upload_data = {"file": open(temp_zip_path, "rb")} else: - upload_data = {'file': open(host_src, 'rb')} + upload_data = {"file": open(host_src, "rb")} - params = 
{'destination': sandbox_dest, 'recursive': str(recursive).lower()} + params = {"destination": sandbox_dest, "recursive": str(recursive).lower()} with self._send_request( - 'POST', - f'{self.runtime_url}/upload_file', + "POST", + f"{self.runtime_url}/upload_file", is_retry=False, files=upload_data, params=params, timeout=300, ) as response: self.log( - 'debug', - f'Copy completed: host:{host_src} -> runtime:{sandbox_dest}. Response: {response.text}', + "debug", + f"Copy completed: host:{host_src} -> runtime:{sandbox_dest}. Response: {response.text}", ) finally: if recursive: os.unlink(temp_zip_path) self.log( - 'debug', f'Copy completed: host:{host_src} -> runtime:{sandbox_dest}' + "debug", f"Copy completed: host:{host_src} -> runtime:{sandbox_dest}" ) def list_files(self, path: str | None = None) -> list[str]: data = {} if path is not None: - data['path'] = path + data["path"] = path with self._send_request( - 'POST', - f'{self.runtime_url}/list_files', + "POST", + f"{self.runtime_url}/list_files", is_retry=False, json=data, timeout=30, @@ -541,10 +546,10 @@ def list_files(self, path: str | None = None) -> list[str]: def copy_from(self, path: str) -> Path: """Zip all files in the sandbox and return as a stream of bytes.""" - params = {'path': path} + params = {"path": path} with self._send_request( - 'GET', - f'{self.runtime_url}/download_files', + "GET", + f"{self.runtime_url}/download_files", is_retry=False, params=params, stream=True, diff --git a/openhands/utils/shutdown_listener.py b/openhands/utils/shutdown_listener.py index 1d29c1a53775..e72667bf4763 100644 --- a/openhands/utils/shutdown_listener.py +++ b/openhands/utils/shutdown_listener.py @@ -19,7 +19,7 @@ def _register_signal_handler(sig: signal.Signals): original_handler = None def handler(sig_: int, frame: FrameType | None): - logger.info(f"shutdown_signal:{sig_}") + logger.info(f'shutdown_signal:{sig_}') global _should_exit _should_exit = True if original_handler: @@ -34,15 +34,15 @@ def 
_register_signal_handlers(): return _should_exit = False - logger.info("_register_signal_handlers") + logger.info('_register_signal_handlers') # Check if we're in the main thread of the main interpreter if threading.current_thread() is threading.main_thread(): - logger.info("_register_signal_handlers:main_thread") + logger.info('_register_signal_handlers:main_thread') for sig in HANDLED_SIGNALS: _register_signal_handler(sig) else: - logger.info("_register_signal_handlers:not_main_thread") + logger.info('_register_signal_handlers:not_main_thread') def should_exit() -> bool: From 301a63df36d00dede4fcf6ccaa8322f74edb3208 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 13:54:39 -0700 Subject: [PATCH 82/91] Not stopping while running --- openhands/controller/agent_controller.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openhands/controller/agent_controller.py b/openhands/controller/agent_controller.py index e0fa0dab0384..c5466dd6e71b 100644 --- a/openhands/controller/agent_controller.py +++ b/openhands/controller/agent_controller.py @@ -197,7 +197,9 @@ async def start_step_loop(self): """The main loop for the agent's step-by-step execution.""" self.log('info', 'Starting step loop...') - while should_continue(): + while True: + if not self._is_awaiting_observation() and not should_continue(): + break if self._closed: break try: @@ -903,3 +905,11 @@ def __repr__(self): f'state={self.state!r}, agent_task={self.agent_task!r}, ' f'delegate={self.delegate!r}, _pending_action={self._pending_action!r})' ) + + def _is_awaiting_observation(self): + events = self.event_stream.get_events(reverse=True) + for event in events: + if isinstance(event, AgentStateChangedObservation): + result = event.agent_state == AgentState.RUNNING + return result + return False From 784c70483abf02b66234b93c14247c934f1e4b6b Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 13:57:22 -0700 Subject: [PATCH 83/91] Lint fixes --- 
openhands/controller/agent_controller.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/controller/agent_controller.py b/openhands/controller/agent_controller.py index c5466dd6e71b..df8e21030b4c 100644 --- a/openhands/controller/agent_controller.py +++ b/openhands/controller/agent_controller.py @@ -905,7 +905,7 @@ def __repr__(self): f'state={self.state!r}, agent_task={self.agent_task!r}, ' f'delegate={self.delegate!r}, _pending_action={self._pending_action!r})' ) - + def _is_awaiting_observation(self): events = self.event_stream.get_events(reverse=True) for event in events: From eb50b1332b4cd498f2b3f9a47eb3adf49326e744 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 14:02:05 -0700 Subject: [PATCH 84/91] Implemented suggestion --- openhands/runtime/impl/eventstream/eventstream_runtime.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openhands/runtime/impl/eventstream/eventstream_runtime.py b/openhands/runtime/impl/eventstream/eventstream_runtime.py index 32712ff3a97d..edf4f659281d 100644 --- a/openhands/runtime/impl/eventstream/eventstream_runtime.py +++ b/openhands/runtime/impl/eventstream/eventstream_runtime.py @@ -179,7 +179,6 @@ def __init__( self.docker_client: docker.DockerClient = self._init_docker_client() self.base_container_image = self.config.sandbox.base_container_image self.runtime_container_image = self.config.sandbox.runtime_container_image - self.rm_all_containers = self.config.sandbox.rm_all_containers self.container_name = CONTAINER_NAME_PREFIX + sid self.container = None self.action_semaphore = threading.Semaphore(1) # Ensure one action at a time @@ -472,7 +471,7 @@ def close(self): if self.config.sandbox.keep_runtime_alive or self.attach_to_existing: return close_prefix = ( - CONTAINER_NAME_PREFIX if self.rm_all_containers else self.container_name + CONTAINER_NAME_PREFIX if self.config.sandbox.rm_all_containers else self.container_name ) remove_all_containers(close_prefix) 
From ec21b3a9117cf4aab5f79d1a4f5f3d39ea9fb52a Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 14:05:54 -0700 Subject: [PATCH 85/91] Reverted remove all containers --- openhands/runtime/impl/eventstream/eventstream_runtime.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openhands/runtime/impl/eventstream/eventstream_runtime.py b/openhands/runtime/impl/eventstream/eventstream_runtime.py index edf4f659281d..30f78f88a2de 100644 --- a/openhands/runtime/impl/eventstream/eventstream_runtime.py +++ b/openhands/runtime/impl/eventstream/eventstream_runtime.py @@ -456,7 +456,7 @@ def _wait_until_alive(self): ): pass - def close(self): + def close(self, rm_all_containers: bool | None = None): """Closes the EventStreamRuntime and associated objects Parameters: @@ -468,10 +468,13 @@ def close(self): if self.session: self.session.close() + if rm_all_containers is None: + rm_all_containers = self.config.sandbox.rm_all_containers + if self.config.sandbox.keep_runtime_alive or self.attach_to_existing: return close_prefix = ( - CONTAINER_NAME_PREFIX if self.config.sandbox.rm_all_containers else self.container_name + CONTAINER_NAME_PREFIX if rm_all_containers else self.container_name ) remove_all_containers(close_prefix) From 7a58b0e6dd61d24b44fd12077c9c85d5c6c5d711 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 14:15:46 -0700 Subject: [PATCH 86/91] Ruff revert --- .../runtime/impl/remote/remote_runtime.py | 252 +++++++++--------- 1 file changed, 126 insertions(+), 126 deletions(-) diff --git a/openhands/runtime/impl/remote/remote_runtime.py b/openhands/runtime/impl/remote/remote_runtime.py index 8db7510ba848..00596fd5acd7 100644 --- a/openhands/runtime/impl/remote/remote_runtime.py +++ b/openhands/runtime/impl/remote/remote_runtime.py @@ -79,15 +79,15 @@ def __init__( ) if self.config.sandbox.api_key is None: raise ValueError( - "API key is required to use the remote runtime. 
" - "Please set the API key in the config (config.toml) or as an environment variable (SANDBOX_API_KEY)." + 'API key is required to use the remote runtime. ' + 'Please set the API key in the config (config.toml) or as an environment variable (SANDBOX_API_KEY).' ) - self.session.headers.update({"X-API-Key": self.config.sandbox.api_key}) + self.session.headers.update({'X-API-Key': self.config.sandbox.api_key}) if self.config.workspace_base is not None: self.log( - "debug", - "Setting workspace_base is not supported in the remote runtime.", + 'debug', + 'Setting workspace_base is not supported in the remote runtime.', ) self.runtime_builder = RemoteRuntimeBuilder( @@ -102,7 +102,7 @@ async def connect(self): try: await call_sync_from_async(self._start_or_attach_to_runtime) except RuntimeNotReadyError: - self.log("error", "Runtime failed to start, timed out before ready") + self.log('error', 'Runtime failed to start, timed out before ready') raise await call_sync_from_async(self.setup_initial_env) self._runtime_initialized = True @@ -110,93 +110,93 @@ async def connect(self): def _start_or_attach_to_runtime(self): existing_runtime = self._check_existing_runtime() if existing_runtime: - self.log("debug", f"Using existing runtime with ID: {self.runtime_id}") + self.log('debug', f'Using existing runtime with ID: {self.runtime_id}') elif self.attach_to_existing: raise RuntimeNotFoundError( f"Could not find existing runtime for SID: {self.sid}" ) else: - self.send_status_message("STATUS$STARTING_CONTAINER") + self.send_status_message('STATUS$STARTING_CONTAINER') if self.config.sandbox.runtime_container_image is None: self.log( - "info", - f"Building remote runtime with base image: {self.config.sandbox.base_container_image}", + 'info', + f'Building remote runtime with base image: {self.config.sandbox.base_container_image}', ) self._build_runtime() else: self.log( - "info", - f"Starting remote runtime with image: {self.config.sandbox.runtime_container_image}", + 'info', + 
f'Starting remote runtime with image: {self.config.sandbox.runtime_container_image}', ) self.container_image = self.config.sandbox.runtime_container_image self._start_runtime() assert ( self.runtime_id is not None - ), "Runtime ID is not set. This should never happen." + ), 'Runtime ID is not set. This should never happen.' assert ( self.runtime_url is not None - ), "Runtime URL is not set. This should never happen." + ), 'Runtime URL is not set. This should never happen.' self.send_status_message("STATUS$WAITING_FOR_CLIENT") if not self.attach_to_existing: - self.log("info", "Waiting for runtime to be alive...") + self.log('info', 'Waiting for runtime to be alive...') self._wait_until_alive() if not self.attach_to_existing: - self.log("info", "Runtime is ready.") - self.send_status_message(" ") + self.log('info', 'Runtime is ready.') + self.send_status_message(' ') def _check_existing_runtime(self) -> bool: try: with self._send_request( - "GET", - f"{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}", + 'GET', + f'{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}', is_retry=False, timeout=5, ) as response: data = response.json() - status = data.get("status") - if status == "running" or status == "paused": + status = data.get('status') + if status == 'running' or status == 'paused': self._parse_runtime_response(response) except requests.HTTPError as e: if e.response.status_code == 404: return False - self.log("debug", f"Error while looking for remote runtime: {e}") + self.log('debug', f'Error while looking for remote runtime: {e}') raise - if status == "running": + if status == 'running': return True - elif status == "stopped": - self.log("debug", "Found existing remote runtime, but it is stopped") + elif status == 'stopped': + self.log('debug', 'Found existing remote runtime, but it is stopped') return False - elif status == "paused": - self.log("debug", "Found existing remote runtime, but it is paused") + elif status == 'paused': + 
self.log('debug', 'Found existing remote runtime, but it is paused') self._resume_runtime() return True else: - self.log("error", f"Invalid response from runtime API: {data}") + self.log('error', f'Invalid response from runtime API: {data}') return False def _build_runtime(self): - self.log("debug", f"Building RemoteRuntime config:\n{self.config}") + self.log('debug', f'Building RemoteRuntime config:\n{self.config}') with self._send_request( - "GET", - f"{self.config.sandbox.remote_runtime_api_url}/registry_prefix", + 'GET', + f'{self.config.sandbox.remote_runtime_api_url}/registry_prefix', is_retry=False, timeout=10, ) as response: response_json = response.json() - registry_prefix = response_json["registry_prefix"] - os.environ["OH_RUNTIME_RUNTIME_IMAGE_REPO"] = ( - registry_prefix.rstrip("/") + "/runtime" + registry_prefix = response_json['registry_prefix'] + os.environ['OH_RUNTIME_RUNTIME_IMAGE_REPO'] = ( + registry_prefix.rstrip('/') + '/runtime' ) self.log( - "debug", + 'debug', f'Runtime image repo: {os.environ["OH_RUNTIME_RUNTIME_IMAGE_REPO"]}', ) if self.config.sandbox.runtime_extra_deps: self.log( - "debug", - f"Installing extra user-provided dependencies in the runtime image: {self.config.sandbox.runtime_extra_deps}", + 'debug', + f'Installing extra user-provided dependencies in the runtime image: {self.config.sandbox.runtime_extra_deps}', ) # Build the container image @@ -209,106 +209,106 @@ def _build_runtime(self): ) with self._send_request( - "GET", - f"{self.config.sandbox.remote_runtime_api_url}/image_exists", + 'GET', + f'{self.config.sandbox.remote_runtime_api_url}/image_exists', is_retry=False, - params={"image": self.container_image}, + params={'image': self.container_image}, timeout=10, ) as response: - if not response.json()["exists"]: + if not response.json()['exists']: raise RuntimeError( - f"Container image {self.container_image} does not exist" + f'Container image {self.container_image} does not exist' ) def _start_runtime(self): # Prepare 
the request body for the /start endpoint plugin_args = [] if self.plugins is not None and len(self.plugins) > 0: - plugin_args = ["--plugins"] + [plugin.name for plugin in self.plugins] + plugin_args = ['--plugins'] + [plugin.name for plugin in self.plugins] browsergym_args = [] if self.config.sandbox.browsergym_eval_env is not None: browsergym_args = [ - "--browsergym-eval-env" - ] + self.config.sandbox.browsergym_eval_env.split(" ") + '--browsergym-eval-env' + ] + self.config.sandbox.browsergym_eval_env.split(' ') command = get_remote_startup_command( self.port, self.config.workspace_mount_path_in_sandbox, - "openhands" if self.config.run_as_openhands else "root", + 'openhands' if self.config.run_as_openhands else 'root', self.config.sandbox.user_id, plugin_args, browsergym_args, is_root=not self.config.run_as_openhands, # is_root=True when running as root ) start_request = { - "image": self.container_image, - "command": command, - "working_dir": "/openhands/code/", - "environment": {"DEBUG": "true"} if self.config.debug else {}, - "session_id": self.sid, + 'image': self.container_image, + 'command': command, + 'working_dir': '/openhands/code/', + 'environment': {'DEBUG': 'true'} if self.config.debug else {}, + 'session_id': self.sid, } # Start the sandbox using the /start endpoint try: with self._send_request( - "POST", - f"{self.config.sandbox.remote_runtime_api_url}/start", + 'POST', + f'{self.config.sandbox.remote_runtime_api_url}/start', is_retry=False, json=start_request, ) as response: self._parse_runtime_response(response) self.log( - "debug", - f"Runtime started. URL: {self.runtime_url}", + 'debug', + f'Runtime started. 
URL: {self.runtime_url}', ) except requests.HTTPError as e: raise RuntimeUnavailableError() from e def _resume_runtime(self): with self._send_request( - "POST", - f"{self.config.sandbox.remote_runtime_api_url}/resume", + 'POST', + f'{self.config.sandbox.remote_runtime_api_url}/resume', is_retry=False, - json={"runtime_id": self.runtime_id}, + json={'runtime_id': self.runtime_id}, timeout=30, ): pass - self.log("debug", "Runtime resumed.") + self.log('debug', 'Runtime resumed.') def _parse_runtime_response(self, response: requests.Response): start_response = response.json() - self.runtime_id = start_response["runtime_id"] - self.runtime_url = start_response["url"] - if "session_api_key" in start_response: + self.runtime_id = start_response['runtime_id'] + self.runtime_url = start_response['url'] + if 'session_api_key' in start_response: self.session.headers.update( - {"X-Session-API-Key": start_response["session_api_key"]} + {'X-Session-API-Key': start_response['session_api_key']} ) @property def vscode_url(self) -> str | None: if self.vscode_enabled and self._runtime_initialized: if ( - hasattr(self, "_vscode_url") and self._vscode_url is not None + hasattr(self, '_vscode_url') and self._vscode_url is not None ): # cached value return self._vscode_url with self._send_request( - "GET", - f"{self.runtime_url}/vscode/connection_token", + 'GET', + f'{self.runtime_url}/vscode/connection_token', timeout=10, ) as response: response_json = response.json() assert isinstance(response_json, dict) - if response_json["token"] is None: + if response_json['token'] is None: return None # parse runtime_url to get vscode_url _parsed_url = urlparse(self.runtime_url) assert isinstance(_parsed_url.scheme, str) and isinstance( _parsed_url.netloc, str ) - self._vscode_url = f'{_parsed_url.scheme}://vscode-{_parsed_url.netloc}/?tkn={response_json["token"]}&folder={self.config.workspace_mount_path_in_sandbox}' + self._vscode_url = 
f'{_parsed_url.scheme}://vscode-{_parsed_url.netloc}/?tkn={response_json['token']}&folder={self.config.workspace_mount_path_in_sandbox}' self.log( - "debug", - f"VSCode URL: {self._vscode_url}", + 'debug', + f'VSCode URL: {self._vscode_url}', ) return self._vscode_url else: @@ -327,57 +327,57 @@ def _wait_until_alive(self): return retry_decorator(self._wait_until_alive_impl)() def _wait_until_alive_impl(self): - self.log("debug", f"Waiting for runtime to be alive at url: {self.runtime_url}") + self.log('debug', f'Waiting for runtime to be alive at url: {self.runtime_url}') with self._send_request( - "GET", - f"{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}", + 'GET', + f'{self.config.sandbox.remote_runtime_api_url}/sessions/{self.sid}', ) as runtime_info_response: runtime_data = runtime_info_response.json() - assert "runtime_id" in runtime_data - assert runtime_data["runtime_id"] == self.runtime_id - assert "pod_status" in runtime_data - pod_status = runtime_data["pod_status"] - self.log("debug", f"Pod status: {pod_status}") + assert 'runtime_id' in runtime_data + assert runtime_data['runtime_id'] == self.runtime_id + assert 'pod_status' in runtime_data + pod_status = runtime_data['pod_status'] + self.log('debug', f'Pod status: {pod_status}') # FIXME: We should fix it at the backend of /start endpoint, make sure # the pod is created before returning the response. # Retry a period of time to give the cluster time to start the pod - if pod_status == "Ready": + if pod_status == 'Ready': try: with self._send_request( - "GET", - f"{self.runtime_url}/alive", + 'GET', + f'{self.runtime_url}/alive', ): # will raise exception if we don't get 200 back. 
pass except requests.HTTPError as e: self.log( - "warning", f"Runtime /alive failed, but pod says it's ready: {e}" + 'warning', f"Runtime /alive failed, but pod says it's ready: {e}" ) raise RuntimeNotReadyError( - f"Runtime /alive failed to respond with 200: {e}" + f'Runtime /alive failed to respond with 200: {e}' ) return elif ( - pod_status == "Not Found" - or pod_status == "Pending" - or pod_status == "Running" + pod_status == 'Not Found' + or pod_status == 'Pending' + or pod_status == 'Running' ): # nb: Running is not yet Ready raise RuntimeNotReadyError( - f"Runtime (ID={self.runtime_id}) is not yet ready. Status: {pod_status}" + f'Runtime (ID={self.runtime_id}) is not yet ready. Status: {pod_status}' ) - elif pod_status in ("Failed", "Unknown"): + elif pod_status in ('Failed', 'Unknown'): # clean up the runtime self.close() raise RuntimeError( - f"Runtime (ID={self.runtime_id}) failed to start. Current status: {pod_status}" + f'Runtime (ID={self.runtime_id}) failed to start. Current status: {pod_status}' ) else: # Maybe this should be a hard failure, but passing through in case the API changes - self.log("warning", f"Unknown pod status: {pod_status}") + self.log('warning', f'Unknown pod status: {pod_status}') self.log( - "debug", - f"Waiting for runtime pod to be active. Current status: {pod_status}", + 'debug', + f'Waiting for runtime pod to be active. 
Current status: {pod_status}', ) raise RuntimeNotReadyError() @@ -388,13 +388,13 @@ def close(self, timeout: int = 10): if self.runtime_id and self.session: try: with self._send_request( - "POST", - f"{self.config.sandbox.remote_runtime_api_url}/stop", + 'POST', + f'{self.config.sandbox.remote_runtime_api_url}/stop', is_retry=False, - json={"runtime_id": self.runtime_id}, + json={'runtime_id': self.runtime_id}, timeout=timeout, ): - self.log("debug", "Runtime stopped.") + self.log('debug', 'Runtime stopped.') except Exception as e: raise e finally: @@ -407,24 +407,24 @@ def run_action(self, action: Action, is_retry: bool = False) -> Observation: return self.edit(action) with self.action_semaphore: if not action.runnable: - return NullObservation("") + return NullObservation('') action_type = action.action # type: ignore[attr-defined] if action_type not in ACTION_TYPE_TO_CLASS: - raise ValueError(f"Action {action_type} does not exist.") + raise ValueError(f'Action {action_type} does not exist.') if not hasattr(self, action_type): return ErrorObservation( - f"[Runtime (ID={self.runtime_id})] Action {action_type} is not supported in the current runtime.", - error_id="AGENT_ERROR$BAD_ACTION", + f'[Runtime (ID={self.runtime_id})] Action {action_type} is not supported in the current runtime.', + error_id='AGENT_ERROR$BAD_ACTION', ) assert action.timeout is not None try: - request_body = {"action": event_to_dict(action)} - self.log("debug", f"Request body: {request_body}") + request_body = {'action': event_to_dict(action)} + self.log('debug', f'Request body: {request_body}') with self._send_request( - "POST", - f"{self.runtime_url}/execute_action", + 'POST', + f'{self.runtime_url}/execute_action', is_retry=False, json=request_body, # wait a few more seconds to get the timeout error from client side @@ -435,7 +435,7 @@ def run_action(self, action: Action, is_retry: bool = False) -> Observation: obs._cause = action.id # type: ignore[attr-defined] except requests.Timeout: 
raise RuntimeError( - f"Runtime failed to return execute_action before the requested timeout of {action.timeout}s" + f'Runtime failed to return execute_action before the requested timeout of {action.timeout}s' ) return obs @@ -444,16 +444,16 @@ def _send_request(self, method, url, is_retry=False, **kwargs): try: return send_request(self.session, method, url, **kwargs) except requests.Timeout: - self.log("error", "No response received within the timeout period.") + self.log('error', 'No response received within the timeout period.') raise except requests.HTTPError as e: if is_runtime_request and e.response.status_code == 404: raise RuntimeDisconnectedError( - f"404 error while connecting to {self.runtime_url}" + f'404 error while connecting to {self.runtime_url}' ) elif is_runtime_request and e.response.status_code == 503: if not is_retry: - self.log("warning", "Runtime appears to be paused. Resuming...") + self.log('warning', 'Runtime appears to be paused. Resuming...') self._resume_runtime() self._wait_until_alive() return self._send_request(method, url, True, **kwargs) @@ -485,16 +485,16 @@ def copy_to( self, host_src: str, sandbox_dest: str, recursive: bool = False ) -> None: if not os.path.exists(host_src): - raise FileNotFoundError(f"Source file {host_src} does not exist") + raise FileNotFoundError(f'Source file {host_src} does not exist') try: if recursive: with tempfile.NamedTemporaryFile( - suffix=".zip", delete=False + suffix='.zip', delete=False ) as temp_zip: temp_zip_path = temp_zip.name - with ZipFile(temp_zip_path, "w") as zipf: + with ZipFile(temp_zip_path, 'w') as zipf: for root, _, files in os.walk(host_src): for file in files: file_path = os.path.join(root, file) @@ -503,39 +503,39 @@ def copy_to( ) zipf.write(file_path, arcname) - upload_data = {"file": open(temp_zip_path, "rb")} + upload_data = {'file': open(temp_zip_path, 'rb')} else: - upload_data = {"file": open(host_src, "rb")} + upload_data = {'file': open(host_src, 'rb')} - params = 
{"destination": sandbox_dest, "recursive": str(recursive).lower()} + params = {'destination': sandbox_dest, 'recursive': str(recursive).lower()} with self._send_request( - "POST", - f"{self.runtime_url}/upload_file", + 'POST', + f'{self.runtime_url}/upload_file', is_retry=False, files=upload_data, params=params, timeout=300, ) as response: self.log( - "debug", - f"Copy completed: host:{host_src} -> runtime:{sandbox_dest}. Response: {response.text}", + 'debug', + f'Copy completed: host:{host_src} -> runtime:{sandbox_dest}. Response: {response.text}', ) finally: if recursive: os.unlink(temp_zip_path) self.log( - "debug", f"Copy completed: host:{host_src} -> runtime:{sandbox_dest}" + 'debug', f'Copy completed: host:{host_src} -> runtime:{sandbox_dest}' ) def list_files(self, path: str | None = None) -> list[str]: data = {} if path is not None: - data["path"] = path + data['path'] = path with self._send_request( - "POST", - f"{self.runtime_url}/list_files", + 'POST', + f'{self.runtime_url}/list_files', is_retry=False, json=data, timeout=30, @@ -545,11 +545,11 @@ def list_files(self, path: str | None = None) -> list[str]: return response_json def copy_from(self, path: str) -> Path: - """Zip all files in the sandbox and return as a stream of bytes.""" - params = {"path": path} + '''Zip all files in the sandbox and return as a stream of bytes.''' + params = {'path': path} with self._send_request( - "GET", - f"{self.runtime_url}/download_files", + 'GET', + f'{self.runtime_url}/download_files', is_retry=False, params=params, stream=True, From b36120c2b3f77315970f3c0cec8e36d2f5b1240a Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 14:16:59 -0700 Subject: [PATCH 87/91] WIP --- openhands/runtime/impl/remote/remote_runtime.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openhands/runtime/impl/remote/remote_runtime.py b/openhands/runtime/impl/remote/remote_runtime.py index 00596fd5acd7..9baa00371c3a 100644 --- 
a/openhands/runtime/impl/remote/remote_runtime.py +++ b/openhands/runtime/impl/remote/remote_runtime.py @@ -56,7 +56,7 @@ def __init__( self, config: AppConfig, event_stream: EventStream, - sid: str = "default", + sid: str = 'default', plugins: list[PluginRequirement] | None = None, env_vars: dict[str, str] | None = None, status_callback: Optional[Callable] = None, @@ -113,7 +113,7 @@ def _start_or_attach_to_runtime(self): self.log('debug', f'Using existing runtime with ID: {self.runtime_id}') elif self.attach_to_existing: raise RuntimeNotFoundError( - f"Could not find existing runtime for SID: {self.sid}" + f'Could not find existing runtime for SID: {self.sid}' ) else: self.send_status_message('STATUS$STARTING_CONTAINER') @@ -136,7 +136,7 @@ def _start_or_attach_to_runtime(self): assert ( self.runtime_url is not None ), 'Runtime URL is not set. This should never happen.' - self.send_status_message("STATUS$WAITING_FOR_CLIENT") + self.send_status_message('STATUS$WAITING_FOR_CLIENT') if not self.attach_to_existing: self.log('info', 'Waiting for runtime to be alive...') self._wait_until_alive() From 9c7ce40db8ebda46a40d475f9076c4098838968a Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 14:18:15 -0700 Subject: [PATCH 88/91] WIP --- openhands/runtime/impl/remote/remote_runtime.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openhands/runtime/impl/remote/remote_runtime.py b/openhands/runtime/impl/remote/remote_runtime.py index 9baa00371c3a..871235b28b45 100644 --- a/openhands/runtime/impl/remote/remote_runtime.py +++ b/openhands/runtime/impl/remote/remote_runtime.py @@ -305,7 +305,7 @@ def vscode_url(self) -> str | None: assert isinstance(_parsed_url.scheme, str) and isinstance( _parsed_url.netloc, str ) - self._vscode_url = f'{_parsed_url.scheme}://vscode-{_parsed_url.netloc}/?tkn={response_json['token']}&folder={self.config.workspace_mount_path_in_sandbox}' + self._vscode_url = 
f'{_parsed_url.scheme}://vscode-{_parsed_url.netloc}/?tkn={response_json["token"]}&folder={self.config.workspace_mount_path_in_sandbox}' self.log( 'debug', f'VSCode URL: {self._vscode_url}', @@ -545,7 +545,7 @@ def list_files(self, path: str | None = None) -> list[str]: return response_json def copy_from(self, path: str) -> Path: - '''Zip all files in the sandbox and return as a stream of bytes.''' + """Zip all files in the sandbox and return as a stream of bytes.""" params = {'path': path} with self._send_request( 'GET', From 503a55366e3ece46557b50de15b0ccc4197210ec Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 14:22:26 -0700 Subject: [PATCH 89/91] WIP --- openhands/runtime/impl/runloop/runloop_runtime.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/runtime/impl/runloop/runloop_runtime.py b/openhands/runtime/impl/runloop/runloop_runtime.py index 0b9310cc3021..20f0ea46a1b8 100644 --- a/openhands/runtime/impl/runloop/runloop_runtime.py +++ b/openhands/runtime/impl/runloop/runloop_runtime.py @@ -261,7 +261,7 @@ def _wait_until_alive(self): logger.error(msg) raise RuntimeError(msg) - def close(self, rm_all_containers: bool = True): + def close(self, rm_all_containers: bool | None = True): if self.log_buffer: self.log_buffer.close() From 1469b2a4a425cba9774e60504ed4fd8f35483c61 Mon Sep 17 00:00:00 2001 From: Robert Brennan Date: Mon, 25 Nov 2024 17:09:03 -0500 Subject: [PATCH 90/91] Update openhands/runtime/impl/remote/remote_runtime.py --- openhands/runtime/impl/remote/remote_runtime.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openhands/runtime/impl/remote/remote_runtime.py b/openhands/runtime/impl/remote/remote_runtime.py index 871235b28b45..548e8a0ee23e 100644 --- a/openhands/runtime/impl/remote/remote_runtime.py +++ b/openhands/runtime/impl/remote/remote_runtime.py @@ -261,6 +261,7 @@ def _start_runtime(self): f'Runtime started. 
URL: {self.runtime_url}', ) except requests.HTTPError as e: + self.log('error', f'Unable to start runtime: {e}') raise RuntimeUnavailableError() from e def _resume_runtime(self): From b89d011cdf0671a9afa2edd0fcad8c52b24975c8 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 25 Nov 2024 16:46:28 -0700 Subject: [PATCH 91/91] Flaky test fix --- tests/unit/test_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_manager.py b/tests/unit/test_manager.py index 4ede3e9bdc36..d17fc94bf175 100644 --- a/tests/unit/test_manager.py +++ b/tests/unit/test_manager.py @@ -59,7 +59,7 @@ async def test_session_is_running_in_cluster(): ) ) with ( - patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.02), + patch('openhands.server.session.manager._REDIS_POLL_TIMEOUT', 0.05), ): async with SessionManager( sio, AppConfig(), InMemoryFileStore()