
feat: CLI to display the Proxy URL as the agent's Agent's Base API (#… #1916

GitHub Actions / Production Test Results failed Oct 6, 2023 in 0s

322 tests run, 312 passed, 1 skipped, 9 failed.

Annotations

Check failure on line 363 in tests/steamship_tests/agents/test_agent_service.py


github-actions / Production Test Results

test_agent_service.test_async_prompt

AssertionError: At least 2 llm prompts should have happened (first for tool selection, second for generating final answer)
assert 1 == 2 failed. [pytest-clarity diff shown]
  LHS vs RHS shown below
  LHS: 1
  RHS: 2
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='90EA220C-34C0-431B-8674-FEA30D8BDFB5', workspace_handle='test_ssftfbryke', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_async_prompt(client: Steamship):
        example_agent_service_path = (
            SRC_PATH / "steamship" / "agents" / "examples" / "example_assistant.py"
        )
        with deploy_package(client, example_agent_service_path, wait_for_init=True) as (
            _,
            _,
            agent_service,
        ):
            context_id = "some_async_fun"
            try:
                streaming_resp = agent_service.invoke(
                    "async_prompt",
                    prompt="who is the current president of the United States?",
                    context_id=context_id,
                )
            except SteamshipError as error:
                pytest.fail(f"failed request: {error}")
    
            # sanity checking on the response
            assert streaming_resp is not None
            assert streaming_resp["file"] is not None
            assert streaming_resp["task"] is not None
    
            # we need to validate that the request id is found via requestId and not request_id
            assert streaming_resp["task"]["requestId"] is not None
    
            file = File(client=client, **(streaming_resp["file"]))
            streaming_task = Task(client=client, **(streaming_resp["task"]))
    
            original_len = len(file.blocks)
    
            # Checking stream only seems to work **after** the Task **starts**
            while streaming_task.state in [TaskState.waiting]:
                # tight loop to check on waiting status of Task
                time.sleep(0.1)
                streaming_task.refresh()
    
            assert streaming_task.state in [TaskState.running]
    
            llm_prompt_event_count = 0
            function_selection_event = False
            tool_execution_event = False
            function_complete_event = False
            assistant_chat_response_event = False
    
            num_blocks = 0
            for block in stream_blocks_for_file(
                client, file_id=file.id, req_id=streaming_task.request_id
            ):
                num_blocks += 1
                for t in block.tags:
                    match t.kind:
                        case TagKind.LLM_STATUS_MESSAGE:
                            if t.name == AgentLogging.PROMPT:
                                llm_prompt_event_count += 1
                        case TagKind.FUNCTION_SELECTION:
                            if t.name == "SearchTool":
                                function_selection_event = True
                        case TagKind.TOOL_STATUS_MESSAGE:
                            tool_execution_event = True
                        case TagKind.ROLE:
                            if t.name == RoleTag.FUNCTION:
                                function_complete_event = True
                        case TagKind.CHAT:
                            if (
                                t.name == ChatTag.ROLE
                                and t.value.get(TagValueKey.STRING_VALUE, "") == RoleTag.ASSISTANT
                            ):
                                assistant_chat_response_event = True
    
            file.refresh()
            assert (
                len(file.blocks) > original_len
            ), "File should have increased in size during AgentService execution"
    
            assert num_blocks > 0, "Blocks should have been streamed during execution"
>           assert llm_prompt_event_count == 2, (
                "At least 2 llm prompts should have happened (first for tool selection, "
                "second for generating final answer)"
            )
E           AssertionError: At least 2 llm prompts should have happened (first for tool selection, second for generating final answer)
E           assert 1 == 2 failed. [pytest-clarity diff shown]
E             LHS vs RHS shown below
E             LHS: 1
E             RHS: 2

tests/steamship_tests/agents/test_agent_service.py:363: AssertionError
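Note: the assertion requires exactly 2 prompt events while its message describes a lower bound ("at least 2"), and only 1 was observed in the stream. A minimal, illustrative sketch of counting prompt events and asserting that lower bound; `TagKind` and `AgentLogging` are the names used in the test above, and this is not a proposed fix to the suite:

    def count_llm_prompt_events(blocks, TagKind, AgentLogging):
        """Count streamed blocks tagged as LLM prompt status messages."""
        count = 0
        for block in blocks:
            for tag in block.tags:
                if tag.kind == TagKind.LLM_STATUS_MESSAGE and tag.name == AgentLogging.PROMPT:
                    count += 1
        return count

    # Usage mirroring the test, but with the lower bound the message describes:
    # prompts = count_llm_prompt_events(streamed_blocks, TagKind, AgentLogging)
    # assert prompts >= 2, "expected a tool-selection prompt and a final-answer prompt"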

Check failure on line 47 in tests/steamship_tests/app/integration/test_e2e_mixins_indexer_pipeline.py


github-actions / Production Test Results

test_e2e_mixins_indexer_pipeline.test_indexer_pipeline_mixin

steamship.base.error.SteamshipError: Task 735CDCFC-40D0-4BFD-AA03-11AD4A6D65D6 did not complete within requested timeout of 180s. The task is still running on the server. You can retrieve its status via Task.get() or try waiting again with wait().
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='C92ED477-F60B-40CC-B699-64332D93FC05', workspace_handle='test_8oosvzfgm6', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_indexer_pipeline_mixin(client: Steamship):
        demo_package_path = PACKAGES_PATH / "package_with_mixin_indexer_pipeline.py"
    
        with deploy_package(client, demo_package_path, wait_for_init=True) as (_, _, instance):
            # Test indexing a pdf file
            pdf_url = "https://steamship.com/test/pdf-test.pdf"
    
            index_task = instance.invoke("index_url", url=pdf_url)
            index_task = Task.parse_obj(index_task)
            index_task.client = client
    
            assert index_task.task_id
            assert index_task.state == TaskState.waiting
    
            index_task.wait()
    
            result = instance.invoke("search_index", query="education", k=1)
            result = SearchResults.parse_obj(result)
            assert len(result.items) == 1
            winner = result.items[0]
            assert winner.tag.text
            assert winner.tag.value
            assert winner.tag.value.get("block_id")  # It has a block id stamped
            assert winner.tag.value.get("file_id")  # It has a file id stamped
            assert winner.tag.value.get("page") == 0
    
            # LOAD THE TAO TE QING PDF
            # This will test metadata
            pdf_url2 = "https://www.with.org/tao_te_ching_en.pdf"
    
            index_task2 = instance.invoke("index_url", url=pdf_url2, metadata={"is_tao": True})
            index_task2 = Task.parse_obj(index_task2)
            index_task2.client = client
    
            assert index_task2.task_id
            assert index_task2.state == TaskState.waiting
    
>           index_task2.wait()

tests/steamship_tests/app/integration/test_e2e_mixins_indexer_pipeline.py:47: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Task(client=Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship...emote_status_message=None, assigned_worker='engine', started_at='2023-10-06T16:44:54Z', max_retries=None, retries=None)
max_timeout_s = 180, retry_delay_s = 1, on_each_refresh = None

    def wait(
        self,
        max_timeout_s: float = 180,
        retry_delay_s: float = 1,
        on_each_refresh: "Optional[Callable[[int, float, Task], None]]" = None,
    ):
        """Polls and blocks until the task has succeeded or failed (or timeout reached).
    
        Parameters
        ----------
        max_timeout_s : int
            Max timeout in seconds. Default: 180s. After this timeout, an exception will be thrown.
            A timeout of -1 is equivalent to no timeout.
        retry_delay_s : float
            Delay between status checks. Default: 1s.
        on_each_refresh : Optional[Callable[[int, float, Task], None]]
            Optional call back you can get after each refresh is made, including success state refreshes.
            The signature represents: (refresh #, total elapsed time, task)
    
            WARNING: Do not pass a long-running function to this variable. It will block the update polling.
        """
        t0 = time.perf_counter()
        refresh_count = 0
        while (
            (max_timeout_s == -1) or (time.perf_counter() - t0 < max_timeout_s)
        ) and self.state not in (
            TaskState.succeeded,
            TaskState.failed,
        ):
            time.sleep(retry_delay_s)
            self.refresh()
            refresh_count += 1
    
            # Possibly make a callback so the caller knows we've tried again
            if on_each_refresh:
                on_each_refresh(refresh_count, time.perf_counter() - t0, self)
    
        # If the task did not complete within the timeout, throw an error
        if self.state not in (TaskState.succeeded, TaskState.failed):
>           raise SteamshipError(
                message=f"Task {self.task_id} did not complete within requested timeout of {max_timeout_s}s. The task is still running on the server. You can retrieve its status via Task.get() or try waiting again with wait()."
            )
E           steamship.base.error.SteamshipError: Task 735CDCFC-40D0-4BFD-AA03-11AD4A6D65D6 did not complete within requested timeout of 180s. The task is still running on the server. You can retrieve its status via Task.get() or try waiting again with wait().

src/steamship/base/tasks.py:277: SteamshipError
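The error message suggests two recovery paths: poll the task via Task.get() or call wait() again. A minimal sketch of retrying wait() in bounded chunks, assuming the import paths shown in the traceback above (steamship.base.error, steamship.base.tasks); the chunk size and retry cap are illustrative values, not ones used by the test:

    from steamship.base.error import SteamshipError
    from steamship.base.tasks import Task, TaskState


    def wait_with_retries(task: Task, chunk_s: float = 180, max_chunks: int = 3) -> Task:
        """Wait for a task, re-invoking wait() a few times before giving up."""
        for _ in range(max_chunks):
            try:
                task.wait(max_timeout_s=chunk_s)
                return task
            except SteamshipError:
                # Task is still running server-side; refresh and try another chunk.
                task.refresh()
                if task.state in (TaskState.succeeded, TaskState.failed):
                    return task
        raise SteamshipError(
            message=f"Task {task.task_id} did not finish after {chunk_s * max_chunks}s of waiting."
        )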

Check failure on line 71 in tests/steamship_tests/agents/transports/test_slack.py


github-actions / Production Test Results

test_slack

requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
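The Retry object shown in the raw output below (Retry(total=0, connect=None, read=False, ...)) re-raises the disconnect immediately. A minimal, hypothetical sketch of absorbing this kind of transient disconnect at the HTTP layer with requests and urllib3; this is an assumed mitigation, not the test client's actual configuration:

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    session = requests.Session()
    retry = Retry(
        total=3,
        backoff_factor=1,           # exponential back-off between attempts
        allowed_methods=["POST"],   # POST is not retried by default
        raise_on_status=False,
    )
    session.mount("https://", HTTPAdapter(max_retries=retry))
    # Caveat: retrying POST is only safe if the endpoint tolerates duplicate delivery.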
Raw output
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
body = b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}'
headers = {'User-Agent': 'python-requests/2.28.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...kage-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:703: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
conn = <urllib3.connection.HTTPSConnection object at 0x7fcc525032e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}', 'hea...age-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
                    httplib_response = conn.getresponse()
                except BaseException as e:
                    # Remove the TypeError from the exception chain in
                    # Python 3 (including for exceptions like SystemExit).
                    # Otherwise it looks like a bug in the code.
>                   six.raise_from(e, None)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:449: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

value = None, from_value = None

>   ???

<string>:3: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
conn = <urllib3.connection.HTTPSConnection object at 0x7fcc525032e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}', 'hea...age-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
>                   httplib_response = conn.getresponse()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:444: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fcc525032e0>

    def getresponse(self):
        """Get the response from the server.
    
        If the HTTPConnection is in the correct state, returns an
        instance of HTTPResponse or of whatever object is returned by
        the response_class variable.
    
        If a request has not been sent or if a previous response has
        not be handled, ResponseNotReady is raised.  If the HTTP
        response indicates that the connection should be closed, then
        it will be closed before the response is returned.  When the
        connection is closed, the underlying socket is closed.
        """
    
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
    
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #                  response operate independently
        #   2) persistent: the response was retained and we await its
        #                  isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady(self.__state)
    
        if self.debuglevel > 0:
            response = self.response_class(self.sock, self.debuglevel,
                                           method=self._method)
        else:
            response = self.response_class(self.sock, method=self._method)
    
        try:
            try:
>               response.begin()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1375: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fcc52500430>

    def begin(self):
        if self.headers is not None:
            # we've already started reading the response
            return
    
        # read until we get a non-100 response
        while True:
>           version, status, reason = self._read_status()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:318: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fcc52500430>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           http.client.RemoteDisconnected: Remote end closed connection without response

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:287: RemoteDisconnected

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7fcc52337340>
request = <PreparedRequest [POST]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
>               resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout,
                )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/adapters.py:489: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
body = b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}'
headers = {'User-Agent': 'python-requests/2.28.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...kage-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Pass method to Response for length checking
            response_kw["request_method"] = method
    
            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(
                httplib_response,
                pool=self,
                connection=response_conn,
                retries=retries,
                **response_kw
            )
    
            # Everything went great!
            clean_exit = True
    
        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise
    
        except (
            TimeoutError,
            HTTPException,
            SocketError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
    
            def _is_ssl_error_message_from_http_proxy(ssl_error):
                # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
                # SSLErrors are kinda all over the place when it comes to the message,
                # so we try to cover our bases here!
                message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
                return (
                    "wrong version number" in message or "unknown protocol" in message
                )
    
            # Try to detect a common user error with proxies which is to
            # set an HTTP proxy to be HTTPS when it should be 'http://'
            # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
            # Instead we add a nice error message and point to a URL.
            if (
                isinstance(e, BaseSSLError)
                and self.proxy
                and _is_ssl_error_message_from_http_proxy(e)
            ):
                e = ProxyError(
                    "Your proxy appears to only use HTTP and not HTTPS, "
                    "try changing your proxy URL to be HTTP. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#https-proxy-error-http-proxy",
                    SSLError(e),
                )
            elif isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError("Connection aborted.", e)
    
>           retries = retries.increment(
                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
            )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:785: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Retry(total=0, connect=None, read=False, redirect=None, status=None)
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
response = None
error = ProtocolError('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
_stacktrace = <traceback object at 0x7fcc526b51c0>

    def increment(
        self,
        method=None,
        url=None,
        response=None,
        error=None,
        _pool=None,
        _stacktrace=None,
    ):
        """Return a new Retry object with incremented retry counters.
    
        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.
    
        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)
    
        total = self.total
        if total is not None:
            total -= 1
    
        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None
    
        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
    
        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or not self._is_method_retryable(method):
>               raise six.reraise(type(error), error, _stacktrace)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/util/retry.py:550: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

tp = <class 'urllib3.exceptions.ProtocolError'>, value = None, tb = None

    def reraise(tp, value, tb=None):
        try:
            if value is None:
                value = tp()
            if value.__traceback__ is not tb:
>               raise value.with_traceback(tb)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/packages/six.py:769: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
body = b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}'
headers = {'User-Agent': 'python-requests/2.28.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...kage-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:703: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
conn = <urllib3.connection.HTTPSConnection object at 0x7fcc525032e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}', 'hea...age-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
                    httplib_response = conn.getresponse()
                except BaseException as e:
                    # Remove the TypeError from the exception chain in
                    # Python 3 (including for exceptions like SystemExit).
                    # Otherwise it looks like a bug in the code.
>                   six.raise_from(e, None)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:449: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

value = None, from_value = None

>   ???

<string>:3: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fcc5261c2e0>
conn = <urllib3.connection.HTTPSConnection object at 0x7fcc525032e0>
method = 'POST', url = '/test_8patlk0fzp/tops-aurora-ysopw/slack_respond_sync'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"event": {"type": "message", "channel": "1", "blocks": [{"elements": [{"text": "another test"}]}]}}', 'hea...age-Instance-Id': 'B678A8EA-433B-4EC4-B005-1782149F44F3', 'Content-Length': '100', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
>                   httplib_response = conn.getresponse()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:444: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fcc525032e0>

    def getresponse(self):
        """Get the response from the server.
    
        If the HTTPConnection is in the correct state, returns an
        instance of HTTPResponse or of whatever object is returned by
        the response_class variable.
    
        If a request has not been sent or if a previous response has
        not been handled, ResponseNotReady is raised.  If the HTTP
        response indicates that the connection should be closed, then
        it will be closed before the response is returned.  When the
        connection is closed, the underlying socket is closed.
        """
    
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
    
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #                  response operate independently
        #   2) persistent: the response was retained and we await its
        #                  isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady(self.__state)
    
        if self.debuglevel > 0:
            response = self.response_class(self.sock, self.debuglevel,
                                           method=self._method)
        else:
            response = self.response_class(self.sock, method=self._method)
    
        try:
            try:
>               response.begin()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1375: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fcc52500430>

    def begin(self):
        if self.headers is not None:
            # we've already started reading the response
            return
    
        # read until we get a non-100 response
        while True:
>           version, status, reason = self._read_status()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:318: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fcc52500430>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:287: ProtocolError

During handling of the above exception, another exception occurred:

client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='FCE5A639-5BE4-42C2-B734-C6C4DCD60902', workspace_handle='test_8patlk0fzp', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_slack(client: Steamship):
        mock_slack_api_path = PACKAGES_PATH / "transports" / "mock_slack_api.py"
        transport_agent_path = PACKAGES_PATH / "transports" / "test_transports_agent.py"
    
        with deploy_package(client, mock_slack_api_path) as (_, _, mock_chat_api):
    
            # Set the slack_api_base to the mock Slack API we just deployed.
            # This will allow us to test for the proper receipt of messages.
            instance_config = {
                "slack_api_base": mock_chat_api.invocation_url,
                "telegram_token": "",
                "telegram_api_base": "",
            }
    
            with deploy_package(
                client,
                transport_agent_path,
                version_config_template=config_template,
                instance_config=instance_config,
                wait_for_init=True,
            ) as (_, _, agent_instance):
                # Set the bot token
                is_token_set_no = agent_instance.invoke("is_slack_token_set")
                assert is_token_set_no is False
    
                agent_instance.invoke("set_slack_access_token", token="")  # noqa: S106
                is_token_set_true = agent_instance.invoke("is_slack_token_set")
                assert is_token_set_true is True
    
                # Note: this is the synchronous respond method which is easier to test.
                # The actual Slack webhook calls this async in order to respond within Slack's required latency.
                respond_method = "slack_respond_sync"
    
                # Set the response URL
                response_url = agent_instance.invocation_url
                if not response_url.endswith("/"):
                    response_url = response_url + "/"
                response_url = response_url + respond_method
    
                # Test sending messages (without auth)
                response = requests.post(
                    url=response_url,
                    json=MockSlackApi.generate_inbound_webhook_body("a test"),
                )
                assert response.ok
                files = File.query(client, f'kind "{MockSlackApi.TEXT_MESSAGE_TAG}"').files
                assert len(files) == 1
                allowed_responses = ["Response to: a test", "Response to: a test".replace(" ", "+")]
                assert (
                    files[0].tags[0].name in allowed_responses
                )  # bug somewhere - results being url encoded in some envs
                assert files[0].tags[0].value == {MockSlackApi.CHAT_ID_KEY: "1"}
    
                # test sending another message; this has been a problem before
>               agent_instance.invoke(
                    respond_method, **MockSlackApi.generate_inbound_webhook_body("another test")
                )

tests/steamship_tests/agents/transports/test_slack.py:71: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/steamship/data/package/package_instance.py:149: in invoke
    return self.client.call(
src/steamship/base/client.py:472: in call
    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/sessions.py:635: in post
    return self.request("POST", url, data=data, json=json, **kwargs)
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/sessions.py:587: in request
    resp = self.send(prep, **send_kwargs)
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/sessions.py:701: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7fcc52337340>
request = <Pr
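
The test_slack failure above bottoms out in urllib3.exceptions.ProtocolError ('Connection aborted.', RemoteDisconnected(...)): the server closed the connection before returning a status line while the client was reading the response to its POST. A minimal sketch of hardening that client path with urllib3's Retry policy and an explicit timeout mounted on a requests Session is shown below. The retry counts, backoff factor, status_forcelist and the (5, 30) timeout are illustrative assumptions, not values taken from this project.

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry  # urllib3 >= 1.26 for allowed_methods

    # Retry connection resets and transient 5xx responses with a short exponential backoff.
    # POST must be opted in explicitly; it is not retried by default because it is not idempotent.
    retry = Retry(
        total=3,
        connect=3,
        read=3,
        status=3,
        backoff_factor=0.5,
        status_forcelist=[502, 503, 504],
        allowed_methods=["GET", "POST"],
    )

    session = requests.Session()
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)

    # A (connect, read) timeout keeps a hung socket from stalling the whole run, e.g.:
    #   session.post(response_url, json=payload, timeout=(5, 30))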

Check failure on line 15 in tests/steamship_tests/app/integration/test_header_passing.py

github-actions / Production Test Results

test_header_passing.test_instance_init_is_called_on_package

steamship.base.error.SteamshipError: Failure attempting to authenticate user via Database auth:
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='9A6E1908-9523-4894-B19E-615424CEC1D5', workspace_handle='test_alxtxovuhw', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_instance_init_is_called_on_package(client: Steamship):
        demo_package_path = PACKAGES_PATH / "echo_test_header.py"
    
>       with deploy_package(client, demo_package_path) as (_, _, instance):

tests/steamship_tests/app/integration/test_header_passing.py:15: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/contextlib.py:135: in __enter__
    return next(self.gen)
tests/steamship_tests/utils/deployables.py:130: in deploy_package
    version = PackageVersion.create(
src/steamship/data/package/package_version.py:68: in create
    task.wait_until_completed()
src/steamship/base/tasks.py:299: in wait_until_completed
    return self.wait(
src/steamship/base/tasks.py:268: in wait
    self.refresh()
src/steamship/base/tasks.py:313: in refresh
    resp = self.client.post("task/status", payload=req, expect=self.expect)
src/steamship/base/client.py:579: in post
    return self.call(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='9A6E1908-9523-4894-B19E-615424CEC1D5', workspace_handle='test_alxtxovuhw', profile='test', request_id=None))
verb = <Verb.POST: 'POST'>, operation = 'task/status'
payload = TaskStatusRequest(task_id='197E3822-B634-4783-ABEC-9B0A0D50799B')
file = None
expect = <class 'steamship.data.package.package_version.PackageVersion'>
debug = False, raw_response = False, is_package_call = False
package_owner = None, package_id = None, package_instance_id = None
as_background_task = False, wait_on_tasks = None, timeout_s = None
task_delay_ms = None

    def call(  # noqa: C901
        self,
        verb: Verb,
        operation: str,
        payload: Union[Request, dict, bytes] = None,
        file: Any = None,
        expect: Type[T] = None,
        debug: bool = False,
        raw_response: bool = False,
        is_package_call: bool = False,
        package_owner: str = None,
        package_id: str = None,
        package_instance_id: str = None,
        as_background_task: bool = False,
        wait_on_tasks: List[Union[str, Task]] = None,
        timeout_s: Optional[float] = None,
        task_delay_ms: Optional[int] = None,
    ) -> Union[
        Any, Task
    ]:  # TODO (enias): I would like to list all possible return types using interfaces instead of Any
        """Post to the Steamship API.
    
        All responses have the format::
    
        .. code-block:: json
    
           {
               "data": "<actual response>",
               "error": {"reason": "<message>"}
           } # noqa: RST203
    
        For the Python client we return the contents of the `data` field if present, and we raise an exception
        if the `error` field is filled in.
        """
        # TODO (enias): Review this codebase
        url = self._url(
            is_package_call=is_package_call,
            package_owner=package_owner,
            operation=operation,
        )
    
        headers = self._headers(
            is_package_call=is_package_call,
            package_owner=package_owner,
            package_id=package_id,
            package_instance_id=package_instance_id,
            as_background_task=as_background_task,
            wait_on_tasks=wait_on_tasks,
            task_delay_ms=task_delay_ms,
        )
    
        data = self._prepare_data(payload=payload)
    
        logging.debug(
            f"Making {verb} to {url} in workspace {self.config.workspace_handle}/{self.config.workspace_id}"
        )
        if verb == Verb.POST:
            if file is not None:
                files = self._prepare_multipart_data(data, file)
                resp = self._session.post(url, files=files, headers=headers, timeout=timeout_s)
            else:
                if isinstance(data, bytes):
                    resp = self._session.post(url, data=data, headers=headers, timeout=timeout_s)
                else:
                    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
        elif verb == Verb.GET:
            resp = self._session.get(url, params=data, headers=headers, timeout=timeout_s)
        else:
            raise Exception(f"Unsupported verb: {verb}")
    
        logging.debug(f"From {verb} to {url} got HTTP {resp.status_code}")
    
        if debug is True:
            logging.debug(f"Got response {resp}")
    
        response_data = self._response_data(resp, raw_response=raw_response)
    
        logging.debug(f"Response JSON {response_data}")
    
        task = None
        error = None
    
        if isinstance(response_data, dict):
            if "status" in response_data:
                try:
                    task = Task.parse_obj(
                        {**response_data["status"], "client": self, "expect": expect}
                    )
                    if "state" in response_data["status"]:
                        if response_data["status"]["state"] == "failed":
                            error = SteamshipError.from_dict(response_data["status"])
                            logging.warning(f"Client received error from server: {error}")
                except TypeError as e:
                    # There's an edge case here -- if a Steamship package returns the JSON dictionary
                    #
                    # { "status": "status string" }
                    #
                    # Then the above handler will attempt to parse it and throw... But we don't actually want to throw
                    # since we don't take a strong opinion on what the response type of a package endpoint ought to be.
                    # It *may* choose to conform to the SteamshipResponse<T> type, but it doesn't have to.
                    if not is_package_call:
                        raise e
    
                if task is not None and task.state == TaskState.failed:
                    error = task.as_error()
    
            if "data" in response_data:
                if expect is not None:
                    if issubclass(expect, SteamshipError):
                        data = expect.from_dict({**response_data["data"], "client": self})
                    elif issubclass(expect, BaseModel):
                        data = expect.parse_obj(
                            self._add_client_to_response(expect, response_data["data"])
                        )
                    else:
                        raise RuntimeError(f"obj of type {expect} does not have a from_dict method")
                else:
                    data = response_data["data"]
    
                if task:
                    task.output = data
            else:
                data = response_data
    
        else:
            data = response_data
    
        if error is not None:
            logging.warning(f"Client received error from server: {error}", exc_info=error)
>           raise error
E           steamship.base.error.SteamshipError: Failure attempting to authenticate user via Database auth:

src/steamship/base/client.py:537: SteamshipError
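
The traceback above runs through the client's call() method, whose docstring describes the response envelope: a data field carrying the payload and an error field that is raised as an exception when filled in. A minimal, standalone sketch of that envelope convention follows; parse_envelope and ApiError are hypothetical names used only for illustration, not SDK symbols.

    from typing import Any


    class ApiError(Exception):
        """Hypothetical stand-in for a server-reported error such as the auth failure above."""


    def parse_envelope(payload: dict) -> Any:
        # A filled-in error field becomes an exception, mirroring the behaviour
        # described in the quoted call() docstring.
        error = payload.get("error")
        if error:
            raise ApiError(error.get("reason", "unknown server error"))
        # Otherwise hand back the contents of the data field (or the raw payload
        # if the endpoint chose not to wrap its response).
        return payload.get("data", payload)

    # e.g. parse_envelope({"error": {"reason": "authentication failed"}}) raises ApiError
    # instead of silently returning None.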

Check failure on line 29 in tests/steamship_tests/data/test_chat.py

github-actions / Production Test Results

test_chat.test_chat_append_system

steamship.base.error.SteamshipError: [APIError]
While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(D508E7D6-480E-4A87-B651-BD1E0B68748A) Wrapped Message: Response from remote not OK. Status = 503
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='FD006417-6A88-4AC7-9AA1-1513F29FFD83', workspace_handle='test_autiqkjmpv', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_chat_append_system(client: Steamship):
        chat = ChatHistory.get_or_create(client, context_keys={"test_id": uuid.uuid4().hex})
    
>       chat.append_system_message(text="some system text")

tests/steamship_tests/data/test_chat.py:29: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/steamship/agents/schema/chathistory.py:179: in append_system_message
    return self.append_message_with_role(text, RoleTag.SYSTEM, tags, content, url, mime_type)
src/steamship/agents/schema/chathistory.py:156: in append_message_with_role
    self.embedding_index.insert(chunk_tags)
src/steamship/data/plugin/index_plugin_instance.py:163: in insert
    self.index.insert_many(embedded_items, reindex=True, allow_long_records=allow_long_records)
src/steamship/data/embeddings.py:216: in insert_many
    return self.client.post(
src/steamship/base/client.py:579: in post
    return self.call(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='FD006417-6A88-4AC7-9AA1-1513F29FFD83', workspace_handle='test_autiqkjmpv', profile='test', request_id=None))
verb = <Verb.POST: 'POST'>, operation = 'embedding-index/item/create'
payload = IndexInsertRequest(index_id='ADFA479F-3C96-4A50-8935-67D7BBE4C918', items=[EmbeddedItem(id=None, index_id=None, file_i...g=None)], value=None, file_id=None, block_type=None, external_id=None, external_type=None, metadata=None, reindex=True)
file = None, expect = <class 'steamship.data.embeddings.IndexInsertResponse'>
debug = False, raw_response = False, is_package_call = False
package_owner = None, package_id = None, package_instance_id = None
as_background_task = False, wait_on_tasks = None, timeout_s = None
task_delay_ms = None

    def call(  # noqa: C901
        self,
        verb: Verb,
        operation: str,
        payload: Union[Request, dict, bytes] = None,
        file: Any = None,
        expect: Type[T] = None,
        debug: bool = False,
        raw_response: bool = False,
        is_package_call: bool = False,
        package_owner: str = None,
        package_id: str = None,
        package_instance_id: str = None,
        as_background_task: bool = False,
        wait_on_tasks: List[Union[str, Task]] = None,
        timeout_s: Optional[float] = None,
        task_delay_ms: Optional[int] = None,
    ) -> Union[
        Any, Task
    ]:  # TODO (enias): I would like to list all possible return types using interfaces instead of Any
        """Post to the Steamship API.
    
        All responses have the format::
    
        .. code-block:: json
    
           {
               "data": "<actual response>",
               "error": {"reason": "<message>"}
           } # noqa: RST203
    
        For the Python client we return the contents of the `data` field if present, and we raise an exception
        if the `error` field is filled in.
        """
        # TODO (enias): Review this codebase
        url = self._url(
            is_package_call=is_package_call,
            package_owner=package_owner,
            operation=operation,
        )
    
        headers = self._headers(
            is_package_call=is_package_call,
            package_owner=package_owner,
            package_id=package_id,
            package_instance_id=package_instance_id,
            as_background_task=as_background_task,
            wait_on_tasks=wait_on_tasks,
            task_delay_ms=task_delay_ms,
        )
    
        data = self._prepare_data(payload=payload)
    
        logging.debug(
            f"Making {verb} to {url} in workspace {self.config.workspace_handle}/{self.config.workspace_id}"
        )
        if verb == Verb.POST:
            if file is not None:
                files = self._prepare_multipart_data(data, file)
                resp = self._session.post(url, files=files, headers=headers, timeout=timeout_s)
            else:
                if isinstance(data, bytes):
                    resp = self._session.post(url, data=data, headers=headers, timeout=timeout_s)
                else:
                    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
        elif verb == Verb.GET:
            resp = self._session.get(url, params=data, headers=headers, timeout=timeout_s)
        else:
            raise Exception(f"Unsupported verb: {verb}")
    
        logging.debug(f"From {verb} to {url} got HTTP {resp.status_code}")
    
        if debug is True:
            logging.debug(f"Got response {resp}")
    
        response_data = self._response_data(resp, raw_response=raw_response)
    
        logging.debug(f"Response JSON {response_data}")
    
        task = None
        error = None
    
        if isinstance(response_data, dict):
            if "status" in response_data:
                try:
                    task = Task.parse_obj(
                        {**response_data["status"], "client": self, "expect": expect}
                    )
                    if "state" in response_data["status"]:
                        if response_data["status"]["state"] == "failed":
                            error = SteamshipError.from_dict(response_data["status"])
                            logging.warning(f"Client received error from server: {error}")
                except TypeError as e:
                    # There's an edge case here -- if a Steamship package returns the JSON dictionary
                    #
                    # { "status": "status string" }
                    #
                    # Then the above handler will attempt to parse it and throw... But we don't actually want to throw
                    # since we don't take a strong opinion on what the response type of a package endpoint ought to be.
                    # It *may* choose to conform to the SteamshipResponse<T> type, but it doesn't have to.
                    if not is_package_call:
                        raise e
    
                if task is not None and task.state == TaskState.failed:
                    error = task.as_error()
    
            if "data" in response_data:
                if expect is not None:
                    if issubclass(expect, SteamshipError):
                        data = expect.from_dict({**response_data["data"], "client": self})
                    elif issubclass(expect, BaseModel):
                        data = expect.parse_obj(
                            self._add_client_to_response(expect, response_data["data"])
                        )
                    else:
                        raise RuntimeError(f"obj of type {expect} does not have a from_dict method")
                else:
                    data = response_data["data"]
    
                if task:
                    task.output = data
            else:
                data = response_data
    
        else:
            data = response_data
    
        if error is not None:
            logging.warning(f"Client received error from server: {error}", exc_info=error)
>           raise error
E           steamship.base.error.SteamshipError: [APIError]
E           While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(D508E7D6-480E-4A87-B651-BD1E0B68748A) Wrapped Message: Response from remote not OK. Status = 503

src/steamship/base/client.py:537: SteamshipError
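
This failure and the ones that follow it in test_chat.py stem from the same transient problem: the embedding backend answered 503 while ChatHistory.append_system_message / append_user_message tried to index the new message, and the client surfaced it as a SteamshipError. One hedged way to make such calls tolerant of a brief outage is a small retry-with-backoff wrapper, sketched below; retry_transient is a hypothetical helper, and the attempt count and delay are illustrative.

    import time

    from steamship.base.error import SteamshipError  # module path as shown in the traceback above


    def retry_transient(fn, attempts: int = 3, delay_s: float = 2.0):
        """Call fn(), retrying a few times when a SteamshipError (e.g. a transient 503) is raised."""
        last_error = None
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except SteamshipError as error:
                last_error = error
                if attempt < attempts:
                    time.sleep(delay_s * attempt)  # linear backoff between attempts
        raise last_error

    # Usage sketch mirroring the failing test body (names as they appear above):
    #   chat = ChatHistory.get_or_create(client, context_keys={"test_id": uuid.uuid4().hex})
    #   retry_transient(lambda: chat.append_system_message(text="some system text"))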

Check failure on line 52 in tests/steamship_tests/data/test_chat.py

github-actions / Production Test Results

test_chat.test_chat_append_user

steamship.base.error.SteamshipError: [APIError]
While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(3E4EA7F9-C803-4C82-A472-BB85184B0504) Wrapped Message: Response from remote not OK. Status = 503
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='10DF0CDD-2AD5-42CC-ADBC-F4434A91E523', workspace_handle='test_81vapptfoh', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_chat_append_user(client: Steamship):
        chat = ChatHistory.get_or_create(client, context_keys={"test_id": uuid.uuid4().hex})
    
>       chat.append_user_message(text="some user text")

tests/steamship_tests/data/test_chat.py:52: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/steamship/agents/schema/chathistory.py:168: in append_user_message
    return self.append_message_with_role(text, RoleTag.USER, tags, content, url, mime_type)
src/steamship/agents/schema/chathistory.py:156: in append_message_with_role
    self.embedding_index.insert(chunk_tags)
src/steamship/data/plugin/index_plugin_instance.py:163: in insert
    self.index.insert_many(embedded_items, reindex=True, allow_long_records=allow_long_records)
src/steamship/data/embeddings.py:216: in insert_many
    return self.client.post(
src/steamship/base/client.py:579: in post
    return self.call(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='10DF0CDD-2AD5-42CC-ADBC-F4434A91E523', workspace_handle='test_81vapptfoh', profile='test', request_id=None))
verb = <Verb.POST: 'POST'>, operation = 'embedding-index/item/create'
payload = IndexInsertRequest(index_id='366E723E-AF9B-4419-B9CB-371A4EAAC0CB', items=[EmbeddedItem(id=None, index_id=None, file_i...g=None)], value=None, file_id=None, block_type=None, external_id=None, external_type=None, metadata=None, reindex=True)
file = None, expect = <class 'steamship.data.embeddings.IndexInsertResponse'>
debug = False, raw_response = False, is_package_call = False
package_owner = None, package_id = None, package_instance_id = None
as_background_task = False, wait_on_tasks = None, timeout_s = None
task_delay_ms = None

    def call(  # noqa: C901
        self,
        verb: Verb,
        operation: str,
        payload: Union[Request, dict, bytes] = None,
        file: Any = None,
        expect: Type[T] = None,
        debug: bool = False,
        raw_response: bool = False,
        is_package_call: bool = False,
        package_owner: str = None,
        package_id: str = None,
        package_instance_id: str = None,
        as_background_task: bool = False,
        wait_on_tasks: List[Union[str, Task]] = None,
        timeout_s: Optional[float] = None,
        task_delay_ms: Optional[int] = None,
    ) -> Union[
        Any, Task
    ]:  # TODO (enias): I would like to list all possible return types using interfaces instead of Any
        """Post to the Steamship API.
    
        All responses have the format::
    
        .. code-block:: json
    
           {
               "data": "<actual response>",
               "error": {"reason": "<message>"}
           } # noqa: RST203
    
        For the Python client we return the contents of the `data` field if present, and we raise an exception
        if the `error` field is filled in.
        """
        # TODO (enias): Review this codebase
        url = self._url(
            is_package_call=is_package_call,
            package_owner=package_owner,
            operation=operation,
        )
    
        headers = self._headers(
            is_package_call=is_package_call,
            package_owner=package_owner,
            package_id=package_id,
            package_instance_id=package_instance_id,
            as_background_task=as_background_task,
            wait_on_tasks=wait_on_tasks,
            task_delay_ms=task_delay_ms,
        )
    
        data = self._prepare_data(payload=payload)
    
        logging.debug(
            f"Making {verb} to {url} in workspace {self.config.workspace_handle}/{self.config.workspace_id}"
        )
        if verb == Verb.POST:
            if file is not None:
                files = self._prepare_multipart_data(data, file)
                resp = self._session.post(url, files=files, headers=headers, timeout=timeout_s)
            else:
                if isinstance(data, bytes):
                    resp = self._session.post(url, data=data, headers=headers, timeout=timeout_s)
                else:
                    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
        elif verb == Verb.GET:
            resp = self._session.get(url, params=data, headers=headers, timeout=timeout_s)
        else:
            raise Exception(f"Unsupported verb: {verb}")
    
        logging.debug(f"From {verb} to {url} got HTTP {resp.status_code}")
    
        if debug is True:
            logging.debug(f"Got response {resp}")
    
        response_data = self._response_data(resp, raw_response=raw_response)
    
        logging.debug(f"Response JSON {response_data}")
    
        task = None
        error = None
    
        if isinstance(response_data, dict):
            if "status" in response_data:
                try:
                    task = Task.parse_obj(
                        {**response_data["status"], "client": self, "expect": expect}
                    )
                    if "state" in response_data["status"]:
                        if response_data["status"]["state"] == "failed":
                            error = SteamshipError.from_dict(response_data["status"])
                            logging.warning(f"Client received error from server: {error}")
                except TypeError as e:
                    # There's an edge case here -- if a Steamship package returns the JSON dictionary
                    #
                    # { "status": "status string" }
                    #
                    # Then the above handler will attempt to parse it and throw... But we don't actually want to throw
                    # since we don't take a strong opinion on what the response type of a package endpoint ought to be.
                    # It *may* choose to conform to the SteamshipResponse<T> type, but it doesn't have to.
                    if not is_package_call:
                        raise e
    
                if task is not None and task.state == TaskState.failed:
                    error = task.as_error()
    
            if "data" in response_data:
                if expect is not None:
                    if issubclass(expect, SteamshipError):
                        data = expect.from_dict({**response_data["data"], "client": self})
                    elif issubclass(expect, BaseModel):
                        data = expect.parse_obj(
                            self._add_client_to_response(expect, response_data["data"])
                        )
                    else:
                        raise RuntimeError(f"obj of type {expect} does not have a from_dict method")
                else:
                    data = response_data["data"]
    
                if task:
                    task.output = data
            else:
                data = response_data
    
        else:
            data = response_data
    
        if error is not None:
            logging.warning(f"Client received error from server: {error}", exc_info=error)
>           raise error
E           steamship.base.error.SteamshipError: [APIError]
E           While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(3E4EA7F9-C803-4C82-A472-BB85184B0504) Wrapped Message: Response from remote not OK. Status = 503

src/steamship/base/client.py:537: SteamshipError
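
Several of the deploy paths quoted in these failures wait on long-running work through Task objects (wait_until_completed, refresh, and the TaskState.failed to task.as_error() conversion inside call()). A minimal sketch of that pattern for test code follows; the TaskState import location is assumed to sit alongside Task in steamship.base.tasks, the module shown in the tracebacks.

    from steamship.base.tasks import Task, TaskState  # Task path from the traceback; TaskState location assumed


    def run_and_check(task: Task):
        """Block until a Task finishes, then mirror call()'s handling of a failed state."""
        task.wait_until_completed()      # refreshes task/status under the hood, as seen above
        if task.state == TaskState.failed:
            raise task.as_error()        # the same conversion client.call() applies
        return task.output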

Check failure on line 74 in tests/steamship_tests/data/test_chat.py

github-actions / Production Test Results

test_chat.test_chat_history_clear

steamship.base.error.SteamshipError: [APIError]
While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(C5D3EF1D-40E4-43D1-B8DA-724ADC78A0E9) Wrapped Message: Response from remote not OK. Status = 503
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='1CDF4969-3935-43EC-9999-D6242A5BC8D1', workspace_handle='test_mppi6kedda', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_chat_history_clear(client: Steamship):
        history = ChatHistory.get_or_create(client, context_keys={"test_id": uuid.uuid4().hex})
>       history.append_user_message(text="some user text")

tests/steamship_tests/data/test_chat.py:74: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/steamship/agents/schema/chathistory.py:168: in append_user_message
    return self.append_message_with_role(text, RoleTag.USER, tags, content, url, mime_type)
src/steamship/agents/schema/chathistory.py:156: in append_message_with_role
    self.embedding_index.insert(chunk_tags)
src/steamship/data/plugin/index_plugin_instance.py:163: in insert
    self.index.insert_many(embedded_items, reindex=True, allow_long_records=allow_long_records)
src/steamship/data/embeddings.py:216: in insert_many
    return self.client.post(
src/steamship/base/client.py:579: in post
    return self.call(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='1CDF4969-3935-43EC-9999-D6242A5BC8D1', workspace_handle='test_mppi6kedda', profile='test', request_id=None))
verb = <Verb.POST: 'POST'>, operation = 'embedding-index/item/create'
payload = IndexInsertRequest(index_id='14386ECB-E1B5-4CE8-B940-00215D5B4228', items=[EmbeddedItem(id=None, index_id=None, file_i...g=None)], value=None, file_id=None, block_type=None, external_id=None, external_type=None, metadata=None, reindex=True)
file = None, expect = <class 'steamship.data.embeddings.IndexInsertResponse'>
debug = False, raw_response = False, is_package_call = False
package_owner = None, package_id = None, package_instance_id = None
as_background_task = False, wait_on_tasks = None, timeout_s = None
task_delay_ms = None

    def call(  # noqa: C901
        self,
        verb: Verb,
        operation: str,
        payload: Union[Request, dict, bytes] = None,
        file: Any = None,
        expect: Type[T] = None,
        debug: bool = False,
        raw_response: bool = False,
        is_package_call: bool = False,
        package_owner: str = None,
        package_id: str = None,
        package_instance_id: str = None,
        as_background_task: bool = False,
        wait_on_tasks: List[Union[str, Task]] = None,
        timeout_s: Optional[float] = None,
        task_delay_ms: Optional[int] = None,
    ) -> Union[
        Any, Task
    ]:  # TODO (enias): I would like to list all possible return types using interfaces instead of Any
        """Post to the Steamship API.
    
        All responses have the format::
    
        .. code-block:: json
    
           {
               "data": "<actual response>",
               "error": {"reason": "<message>"}
           } # noqa: RST203
    
        For the Python client we return the contents of the `data` field if present, and we raise an exception
        if the `error` field is filled in.
        """
        # TODO (enias): Review this codebase
        url = self._url(
            is_package_call=is_package_call,
            package_owner=package_owner,
            operation=operation,
        )
    
        headers = self._headers(
            is_package_call=is_package_call,
            package_owner=package_owner,
            package_id=package_id,
            package_instance_id=package_instance_id,
            as_background_task=as_background_task,
            wait_on_tasks=wait_on_tasks,
            task_delay_ms=task_delay_ms,
        )
    
        data = self._prepare_data(payload=payload)
    
        logging.debug(
            f"Making {verb} to {url} in workspace {self.config.workspace_handle}/{self.config.workspace_id}"
        )
        if verb == Verb.POST:
            if file is not None:
                files = self._prepare_multipart_data(data, file)
                resp = self._session.post(url, files=files, headers=headers, timeout=timeout_s)
            else:
                if isinstance(data, bytes):
                    resp = self._session.post(url, data=data, headers=headers, timeout=timeout_s)
                else:
                    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
        elif verb == Verb.GET:
            resp = self._session.get(url, params=data, headers=headers, timeout=timeout_s)
        else:
            raise Exception(f"Unsupported verb: {verb}")
    
        logging.debug(f"From {verb} to {url} got HTTP {resp.status_code}")
    
        if debug is True:
            logging.debug(f"Got response {resp}")
    
        response_data = self._response_data(resp, raw_response=raw_response)
    
        logging.debug(f"Response JSON {response_data}")
    
        task = None
        error = None
    
        if isinstance(response_data, dict):
            if "status" in response_data:
                try:
                    task = Task.parse_obj(
                        {**response_data["status"], "client": self, "expect": expect}
                    )
                    if "state" in response_data["status"]:
                        if response_data["status"]["state"] == "failed":
                            error = SteamshipError.from_dict(response_data["status"])
                            logging.warning(f"Client received error from server: {error}")
                except TypeError as e:
                    # There's an edge case here -- if a Steamship package returns the JSON dictionary
                    #
                    # { "status": "status string" }
                    #
                    # Then the above handler will attempt to parse it and throw... But we don't actually want to throw
                    # since we don't take a strong opinion on what the response type of a package endpoint ought to be.
                    # It *may* choose to conform to the SteamshipResponse<T> type, but it doesn't have to.
                    if not is_package_call:
                        raise e
    
                if task is not None and task.state == TaskState.failed:
                    error = task.as_error()
    
            if "data" in response_data:
                if expect is not None:
                    if issubclass(expect, SteamshipError):
                        data = expect.from_dict({**response_data["data"], "client": self})
                    elif issubclass(expect, BaseModel):
                        data = expect.parse_obj(
                            self._add_client_to_response(expect, response_data["data"])
                        )
                    else:
                        raise RuntimeError(f"obj of type {expect} does not have a from_dict method")
                else:
                    data = response_data["data"]
    
                if task:
                    task.output = data
            else:
                data = response_data
    
        else:
            data = response_data
    
        if error is not None:
            logging.warning(f"Client received error from server: {error}", exc_info=error)
>           raise error
E           steamship.base.error.SteamshipError: [APIError]
E           While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(C5D3EF1D-40E4-43D1-B8DA-724ADC78A0E9) Wrapped Message: Response from remote not OK. Status = 503

src/steamship/base/client.py:537: SteamshipError
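
The urlopen() frames earlier in this run normalise their retries argument with Retry.from_int, following the semantics in the retries docstring quoted above (an integer caps retries, False disables them and returns redirect responses instead of raising). A short, self-contained demonstration of that normalisation is sketched below; it is independent of this test run.

    from urllib3.util.retry import Retry

    # An integer becomes a Retry with that total; an existing Retry instance passes through untouched.
    print(Retry.from_int(3))
    print(Retry.from_int(Retry(connect=5)))

    # False disables retries entirely, so redirect responses are returned to the caller
    # rather than raising MaxRetryError, as the docstring above describes.
    print(Retry.from_int(False))

    # None falls back to the supplied default (here standing in for the pool-level policy).
    print(Retry.from_int(None, default=Retry(total=10)))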

Check failure on line 88 in tests/steamship_tests/data/test_chat.py

github-actions / Production Test Results

test_chat.test_chat_history_delete_some

steamship.base.error.SteamshipError: [APIError]
While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(06DB3CB7-4A59-45D7-BDBB-B0313C4FBF79) Wrapped Message: The instance you are attempting to invoke failed initialization and cannot be used.  Init failure message: Initialization Failed Wrapped Message: Response from remote not OK. Status = 503
Raw output
client = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='E505059B-CF1A-4392-B188-C682588C33B9', workspace_handle='test_5nkp44jdta', profile='test', request_id=None))

    @pytest.mark.usefixtures("client")
    def test_chat_history_delete_some(client: Steamship):
        history = ChatHistory.get_or_create(client, context_keys={"test_id": uuid.uuid4().hex})
>       history.append_user_message(text="some user text")

tests/steamship_tests/data/test_chat.py:88: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/steamship/agents/schema/chathistory.py:168: in append_user_message
    return self.append_message_with_role(text, RoleTag.USER, tags, content, url, mime_type)
src/steamship/agents/schema/chathistory.py:156: in append_message_with_role
    self.embedding_index.insert(chunk_tags)
src/steamship/data/plugin/index_plugin_instance.py:163: in insert
    self.index.insert_many(embedded_items, reindex=True, allow_long_records=allow_long_records)
src/steamship/data/embeddings.py:216: in insert_many
    return self.client.post(
src/steamship/base/client.py:579: in post
    return self.call(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Steamship(config=Configuration(api_key=SecretStr('**********'), api_base=AnyHttpUrl('https://api.steamship.com/api/v1/...kspace_id='E505059B-CF1A-4392-B188-C682588C33B9', workspace_handle='test_5nkp44jdta', profile='test', request_id=None))
verb = <Verb.POST: 'POST'>, operation = 'embedding-index/item/create'
payload = IndexInsertRequest(index_id='8EF8EE64-C738-455A-BE9C-C3DABAB7047B', items=[EmbeddedItem(id=None, index_id=None, file_i...g=None)], value=None, file_id=None, block_type=None, external_id=None, external_type=None, metadata=None, reindex=True)
file = None, expect = <class 'steamship.data.embeddings.IndexInsertResponse'>
debug = False, raw_response = False, is_package_call = False
package_owner = None, package_id = None, package_instance_id = None
as_background_task = False, wait_on_tasks = None, timeout_s = None
task_delay_ms = None

    def call(  # noqa: C901
        self,
        verb: Verb,
        operation: str,
        payload: Union[Request, dict, bytes] = None,
        file: Any = None,
        expect: Type[T] = None,
        debug: bool = False,
        raw_response: bool = False,
        is_package_call: bool = False,
        package_owner: str = None,
        package_id: str = None,
        package_instance_id: str = None,
        as_background_task: bool = False,
        wait_on_tasks: List[Union[str, Task]] = None,
        timeout_s: Optional[float] = None,
        task_delay_ms: Optional[int] = None,
    ) -> Union[
        Any, Task
    ]:  # TODO (enias): I would like to list all possible return types using interfaces instead of Any
        """Post to the Steamship API.
    
        All responses have the format::
    
        .. code-block:: json
    
           {
               "data": "<actual response>",
               "error": {"reason": "<message>"}
           } # noqa: RST203
    
        For the Python client we return the contents of the `data` field if present, and we raise an exception
        if the `error` field is filled in.
        """
        # TODO (enias): Review this codebase
        url = self._url(
            is_package_call=is_package_call,
            package_owner=package_owner,
            operation=operation,
        )
    
        headers = self._headers(
            is_package_call=is_package_call,
            package_owner=package_owner,
            package_id=package_id,
            package_instance_id=package_instance_id,
            as_background_task=as_background_task,
            wait_on_tasks=wait_on_tasks,
            task_delay_ms=task_delay_ms,
        )
    
        data = self._prepare_data(payload=payload)
    
        logging.debug(
            f"Making {verb} to {url} in workspace {self.config.workspace_handle}/{self.config.workspace_id}"
        )
        if verb == Verb.POST:
            if file is not None:
                files = self._prepare_multipart_data(data, file)
                resp = self._session.post(url, files=files, headers=headers, timeout=timeout_s)
            else:
                if isinstance(data, bytes):
                    resp = self._session.post(url, data=data, headers=headers, timeout=timeout_s)
                else:
                    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
        elif verb == Verb.GET:
            resp = self._session.get(url, params=data, headers=headers, timeout=timeout_s)
        else:
            raise Exception(f"Unsupported verb: {verb}")
    
        logging.debug(f"From {verb} to {url} got HTTP {resp.status_code}")
    
        if debug is True:
            logging.debug(f"Got response {resp}")
    
        response_data = self._response_data(resp, raw_response=raw_response)
    
        logging.debug(f"Response JSON {response_data}")
    
        task = None
        error = None
    
        if isinstance(response_data, dict):
            if "status" in response_data:
                try:
                    task = Task.parse_obj(
                        {**response_data["status"], "client": self, "expect": expect}
                    )
                    if "state" in response_data["status"]:
                        if response_data["status"]["state"] == "failed":
                            error = SteamshipError.from_dict(response_data["status"])
                            logging.warning(f"Client received error from server: {error}")
                except TypeError as e:
                    # There's an edge case here -- if a Steamship package returns the JSON dictionary
                    #
                    # { "status": "status string" }
                    #
                    # Then the above handler will attempt to parse it and throw... But we don't actually want to throw
                    # since we don't take a strong opinion on what the response type of a package endpoint ought to be.
                    # It *may* choose to conform to the SteamshipResponse<T> type, but it doesn't have to.
                    if not is_package_call:
                        raise e
    
                if task is not None and task.state == TaskState.failed:
                    error = task.as_error()
    
            if "data" in response_data:
                if expect is not None:
                    if issubclass(expect, SteamshipError):
                        data = expect.from_dict({**response_data["data"], "client": self})
                    elif issubclass(expect, BaseModel):
                        data = expect.parse_obj(
                            self._add_client_to_response(expect, response_data["data"])
                        )
                    else:
                        raise RuntimeError(f"obj of type {expect} does not have a from_dict method")
                else:
                    data = response_data["data"]
    
                if task:
                    task.output = data
            else:
                data = response_data
    
        else:
            data = response_data
    
        if error is not None:
            logging.warning(f"Client received error from server: {error}", exc_info=error)
>           raise error
E           steamship.base.error.SteamshipError: [APIError]
E           While attempting to embed records, at least one embedding attempt failed. Wrapped Message: There was an error embedding simdex record with id Optional(06DB3CB7-4A59-45D7-BDBB-B0313C4FBF79) Wrapped Message: The instance you are attempting to invoke failed initialization and cannot be used.  Init failure message: Initialization Failed Wrapped Message: Response from remote not OK. Status = 503

src/steamship/base/client.py:537: SteamshipError
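
The failure above is transient: the embedding plugin instance behind the ChatHistory's index returned HTTP 503 while initializing, and the SDK surfaced that as a SteamshipError from the insert call. A minimal sketch of tolerating such transient errors in a test follows, assuming only the steamship.base.error.SteamshipError type shown in the traceback; the helper name and retry parameters are illustrative, not part of the SDK.

    # Illustrative only: retry a flaky plugin-backed call that raises SteamshipError.
    import time

    from steamship.base.error import SteamshipError

    def call_with_retries(fn, attempts: int = 3, delay_s: float = 2.0):
        """Call fn(), retrying on SteamshipError with a fixed delay (attempts >= 1)."""
        last_error = None
        for _ in range(attempts):
            try:
                return fn()
            except SteamshipError as error:
                last_error = error
                time.sleep(delay_s)
        raise last_error

    # Example usage against the call that failed above (chathistory.py:168):
    # call_with_retries(lambda: chat_history.append_user_message("hello"))

Whether to retry or simply surface plugin-initialization 503s is a test-suite policy question; the sketch only shows the mechanical part.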

Check failure on line 1 in tests/steamship_tests/app/integration/test_e2e_mixins_indexer_pipeline.py

@github-actions / Production Test Results

test_e2e_mixins_indexer_pipeline.test_indexer_pipeline_mixin

failed on teardown with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
Raw output
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
method = 'POST', url = '/api/v1/workspace/delete'
body = b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}'
headers = {'User-Agent': 'python-requests/2.28.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv..., 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/api/v1/workspace/delete', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:703: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
conn = <urllib3.connection.HTTPSConnection object at 0x7fc7ed89cbe0>
method = 'POST', url = '/api/v1/workspace/delete'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}', 'headers': {'User-Agent': 'python-requests... 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
                    httplib_response = conn.getresponse()
                except BaseException as e:
                    # Remove the TypeError from the exception chain in
                    # Python 3 (including for exceptions like SystemExit).
                    # Otherwise it looks like a bug in the code.
>                   six.raise_from(e, None)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:449: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

value = None, from_value = None

>   ???

<string>:3: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
conn = <urllib3.connection.HTTPSConnection object at 0x7fc7ed89cbe0>
method = 'POST', url = '/api/v1/workspace/delete'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}', 'headers': {'User-Agent': 'python-requests... 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
>                   httplib_response = conn.getresponse()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:444: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fc7ed89cbe0>

    def getresponse(self):
        """Get the response from the server.
    
        If the HTTPConnection is in the correct state, returns an
        instance of HTTPResponse or of whatever object is returned by
        the response_class variable.
    
        If a request has not been sent or if a previous response has
        not be handled, ResponseNotReady is raised.  If the HTTP
        response indicates that the connection should be closed, then
        it will be closed before the response is returned.  When the
        connection is closed, the underlying socket is closed.
        """
    
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
    
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #                  response operate independently
        #   2) persistent: the response was retained and we await its
        #                  isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady(self.__state)
    
        if self.debuglevel > 0:
            response = self.response_class(self.sock, self.debuglevel,
                                           method=self._method)
        else:
            response = self.response_class(self.sock, method=self._method)
    
        try:
            try:
>               response.begin()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1375: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fc7ed50e740>

    def begin(self):
        if self.headers is not None:
            # we've already started reading the response
            return
    
        # read until we get a non-100 response
        while True:
>           version, status, reason = self._read_status()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:318: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fc7ed50e740>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           http.client.RemoteDisconnected: Remote end closed connection without response

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:287: RemoteDisconnected
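
The frames above show the root cause of the teardown failure: the server closed a keep-alive connection while the client was deleting the test workspace, and requests' default Retry(total=0, read=False) re-raises read errors immediately instead of retrying. A minimal sketch of mounting a retrying adapter on a requests session is below; the retry counts, backoff, and the decision to retry POST are assumptions for illustration, not project configuration.

    # Illustrative only: make a requests session retry dropped keep-alive connections.
    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    retry = Retry(
        total=3,
        connect=3,
        read=3,  # requests' default is read=False, which re-raises read errors immediately
        backoff_factor=0.5,
        status_forcelist=(502, 503, 504),
        allowed_methods=frozenset({"GET", "POST"}),  # POST is not retried by default
    )

    session = requests.Session()
    session.mount("https://", HTTPAdapter(max_retries=retry))

    # e.g. session.post("https://api.steamship.com/api/v1/workspace/delete", json={...})

Retrying a non-idempotent workspace/delete call is a design choice that depends on the API's semantics, so this is a sketch rather than a drop-in fix.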

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7fc7ed5ec2b0>
request = <PreparedRequest [POST]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
>               resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout,
                )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/adapters.py:489: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
method = 'POST', url = '/api/v1/workspace/delete'
body = b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}'
headers = {'User-Agent': 'python-requests/2.28.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv..., 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/api/v1/workspace/delete', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Pass method to Response for length checking
            response_kw["request_method"] = method
    
            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(
                httplib_response,
                pool=self,
                connection=response_conn,
                retries=retries,
                **response_kw
            )
    
            # Everything went great!
            clean_exit = True
    
        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise
    
        except (
            TimeoutError,
            HTTPException,
            SocketError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
    
            def _is_ssl_error_message_from_http_proxy(ssl_error):
                # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
                # SSLErrors are kinda all over the place when it comes to the message,
                # so we try to cover our bases here!
                message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
                return (
                    "wrong version number" in message or "unknown protocol" in message
                )
    
            # Try to detect a common user error with proxies which is to
            # set an HTTP proxy to be HTTPS when it should be 'http://'
            # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
            # Instead we add a nice error message and point to a URL.
            if (
                isinstance(e, BaseSSLError)
                and self.proxy
                and _is_ssl_error_message_from_http_proxy(e)
            ):
                e = ProxyError(
                    "Your proxy appears to only use HTTP and not HTTPS, "
                    "try changing your proxy URL to be HTTP. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#https-proxy-error-http-proxy",
                    SSLError(e),
                )
            elif isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError("Connection aborted.", e)
    
>           retries = retries.increment(
                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
            )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:785: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Retry(total=0, connect=None, read=False, redirect=None, status=None)
method = 'POST', url = '/api/v1/workspace/delete', response = None
error = ProtocolError('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
_stacktrace = <traceback object at 0x7fc7ed50bf00>

    def increment(
        self,
        method=None,
        url=None,
        response=None,
        error=None,
        _pool=None,
        _stacktrace=None,
    ):
        """Return a new Retry object with incremented retry counters.
    
        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.
    
        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)
    
        total = self.total
        if total is not None:
            total -= 1
    
        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None
    
        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
    
        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or not self._is_method_retryable(method):
>               raise six.reraise(type(error), error, _stacktrace)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/util/retry.py:550: 
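
For reference, the re-raise at retry.py:550 above happens because requests builds Retry(total=0, connect=None, read=False, ...) by default, and POST is also outside urllib3's default set of retryable methods. A short check, assuming urllib3 1.26.x as pinned by requests 2.28.2:

    # Illustrative only: confirm why the read error was re-raised instead of retried.
    from urllib3.util.retry import Retry

    default_requests_retry = Retry(total=0, connect=None, read=False, redirect=None, status=None)
    print(default_requests_retry.read)              # False -> read errors re-raise immediately
    print("POST" in Retry.DEFAULT_ALLOWED_METHODS)  # False -> POST is not retried by default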
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

tp = <class 'urllib3.exceptions.ProtocolError'>, value = None, tb = None

    def reraise(tp, value, tb=None):
        try:
            if value is None:
                value = tp()
            if value.__traceback__ is not tb:
>               raise value.with_traceback(tb)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/packages/six.py:769: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
method = 'POST', url = '/api/v1/workspace/delete'
body = b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}'
headers = {'User-Agent': 'python-requests/2.28.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv..., 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/api/v1/workspace/delete', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:703: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
conn = <urllib3.connection.HTTPSConnection object at 0x7fc7ed89cbe0>
method = 'POST', url = '/api/v1/workspace/delete'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}', 'headers': {'User-Agent': 'python-requests... 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
                    httplib_response = conn.getresponse()
                except BaseException as e:
                    # Remove the TypeError from the exception chain in
                    # Python 3 (including for exceptions like SystemExit).
                    # Otherwise it looks like a bug in the code.
>                   six.raise_from(e, None)

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:449: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
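
The docstring in the frame above notes that `timeout` can be either a plain number or a `urllib3.util.Timeout` instance. As a minimal illustration (not part of this traceback; the URL and the timeout values are placeholders), the two forms look like this:

    import urllib3
    from urllib3.util import Timeout

    http = urllib3.PoolManager()

    # A single number applies the same budget to both the connect and the read phase.
    http.request("GET", "https://api.steamship.com/api/v1/", timeout=5.0)

    # A Timeout object sets separate connect/read budgets, as the docstring describes.
    http.request(
        "GET",
        "https://api.steamship.com/api/v1/",
        timeout=Timeout(connect=2.0, read=10.0),
    )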

value = None, from_value = None

>   ???

<string>:3: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fc7ed89d360>
conn = <urllib3.connection.HTTPSConnection object at 0x7fc7ed89cbe0>
method = 'POST', url = '/api/v1/workspace/delete'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': b'{"id": "C92ED477-F60B-40CC-B699-64332D93FC05", "handle": null}', 'headers': {'User-Agent': 'python-requests... 'X-Workspace-Id': '55119640-24BC-4381-B3E3-C4C3D7556499', 'Content-Length': '62', 'Content-Type': 'application/json'}}
timeout_obj = Timeout(connect=None, read=None, total=None), read_timeout = None

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)
    
        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
>                   httplib_response = conn.getresponse()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/urllib3/connectionpool.py:444: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fc7ed89cbe0>

    def getresponse(self):
        """Get the response from the server.
    
        If the HTTPConnection is in the correct state, returns an
        instance of HTTPResponse or of whatever object is returned by
        the response_class variable.
    
        If a request has not been sent or if a previous response has
        not been handled, ResponseNotReady is raised.  If the HTTP
        response indicates that the connection should be closed, then
        it will be closed before the response is returned.  When the
        connection is closed, the underlying socket is closed.
        """
    
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
    
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #                  response operate independently
        #   2) persistent: the response was retained and we await its
        #                  isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady(self.__state)
    
        if self.debuglevel > 0:
            response = self.response_class(self.sock, self.debuglevel,
                                           method=self._method)
        else:
            response = self.response_class(self.sock, method=self._method)
    
        try:
            try:
>               response.begin()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1375: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fc7ed50e740>

    def begin(self):
        if self.headers is not None:
            # we've already started reading the response
            return
    
        # read until we get a non-100 response
        while True:
>           version, status, reason = self._read_status()

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:318: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7fc7ed50e740>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))

/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:287: ProtocolError
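
The failure above is the server dropping the connection before sending a status line; urllib3 surfaces this as a ProtocolError, which requests normally re-raises as a ConnectionError. If this proves to be transient, one common mitigation is a retry policy mounted on the session's HTTPAdapter. This is only a sketch, under the assumption that the test client's underlying requests.Session is accessible (hypothetical wiring, not something the SDK is shown doing in this log):

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    session = requests.Session()
    retry = Retry(
        total=3,                       # retry a few times on dropped connections
        backoff_factor=0.5,            # exponential backoff between attempts
        status_forcelist=(502, 503, 504),
        allowed_methods=frozenset({"GET", "POST"}),  # retrying POST assumes the endpoint is idempotent
    )
    session.mount("https://", HTTPAdapter(max_retries=retry))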

During handling of the above exception, another exception occurred:

    @pytest.fixture()
    def client() -> Steamship:
        """Returns a client rooted in a new workspace, then deletes that workspace afterwards.
    
        To use, simply import this file and then write a test which takes `client`
        as an argument.
    
        Example
        -------
        The client can be used by injecting a fixture as follows::
    
            @pytest.mark.usefixtures("client")
            def test_something(client):
              pass
        """
        steamship = get_steamship_client()
        workspace_handle = random_name()
        workspace = Workspace.create(client=steamship, handle=workspace_handle)
        # NOTE: get_steamship_client takes either `workspace_handle` or `workspace_id`, but NOT `workspace` as a keyword arg
        new_client = get_steamship_client(workspace_handle=workspace_handle)
        yield new_client
>       workspace.delete()

tests/steamship_tests/utils/fixtures.py:34: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
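
The second traceback shows the failure happening in fixture teardown, after the test body has already run. If workspace cleanup should not fail the test on a transient network error, the teardown could be made best-effort. A sketch, reusing the names from the fixture above (Steamship, Workspace, get_steamship_client, random_name, assumed importable from the project's test utilities); the broad except and warning log are illustrative, not current project code:

    import logging

    import pytest

    @pytest.fixture()
    def client() -> Steamship:
        """Same setup as the fixture above, but with best-effort workspace cleanup."""
        steamship = get_steamship_client()
        workspace_handle = random_name()
        workspace = Workspace.create(client=steamship, handle=workspace_handle)
        new_client = get_steamship_client(workspace_handle=workspace_handle)
        try:
            yield new_client
        finally:
            try:
                workspace.delete()
            except Exception:  # deliberately broad: a dropped connection should not fail the test
                logging.warning("Best-effort cleanup of workspace %s failed", workspace_handle)
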
src/steamship/data/workspace.py:46: in delete
    return self.client.post("workspace/delete", IdentifierRequest(id=self.id), expect=Workspace)
src/steamship/base/client.py:579: in post
    return self.call(
src/steamship/base/client.py:472: in call
    resp = self._session.post(url, json=data, headers=headers, timeout=timeout_s)
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/sessions.py:635: in post
    return self.request("POST", url, data=data, json=json, **kwargs)
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/sessions.py:587: in request
    resp = self.send(prep, **send_kwargs)
/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/requests/sessions.py:701: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7fc7ed5ec2b0>
request = <PreparedRequest [POST]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,