diff --git a/CHANGES.rst b/CHANGES.rst index 5b02623..0245204 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,1263 @@ .. towncrier release notes start +3.10.10 (2024-10-10) +==================== + +Bug fixes +--------- + +- Fixed error messages from :py:class:`~aiohttp.resolver.AsyncResolver` being swallowed -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9451`, :issue:`9455`. + + + + +Features +-------- + +- Added :exc:`aiohttp.ClientConnectorDNSError` for differentiating DNS resolution errors from other connector errors -- by :user:`mstojcevich`. + + + *Related issues and pull requests on GitHub:* + :issue:`8455`. + + + + +Miscellaneous internal changes +------------------------------ + +- Simplified DNS resolution throttling code to reduce chance of race conditions -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9454`. + + + + +---- + + +3.10.9 (2024-10-04) +=================== + +Bug fixes +--------- + +- Fixed proxy headers being used in the ``ConnectionKey`` hash when a proxy was not being used -- by :user:`bdraco`. + + If default headers are used, they are also used for proxy headers. This could have led to creating connections that were not needed when one was already available. + + + *Related issues and pull requests on GitHub:* + :issue:`9368`. + + + +- Widened the type of the ``trace_request_ctx`` parameter of + :meth:`ClientSession.request() ` and friends + -- by :user:`layday`. + + + *Related issues and pull requests on GitHub:* + :issue:`9397`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Fixed failure to try next host after single-host connection timeout -- by :user:`brettdh`. + + The default client :class:`aiohttp.ClientTimeout` params has changed to include a ``sock_connect`` timeout of 30 seconds so that this correct behavior happens by default. + + + *Related issues and pull requests on GitHub:* + :issue:`7342`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of resolving hosts with Python 3.12+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9342`. + + + +- Reduced memory required for timer objects created during the client request lifecycle -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9406`. + + + + +---- + + +3.10.8 (2024-09-28) +=================== + +Bug fixes +--------- + +- Fixed cancellation leaking upwards on timeout -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9326`. + + + + +---- + + +3.10.7 (2024-09-27) +=================== + +Bug fixes +--------- + +- Fixed assembling the :class:`~yarl.URL` for web requests when the host contains a non-default port or IPv6 address -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9309`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of determining if a URL is absolute -- by :user:`bdraco`. + + The property :attr:`~yarl.URL.absolute` is more performant than the method ``URL.is_absolute()`` and preferred when newer versions of yarl are used. + + + *Related issues and pull requests on GitHub:* + :issue:`9171`. + + + +- Replaced code that can now be handled by ``yarl`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9301`. 
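+
+A minimal sketch of how the new :exc:`aiohttp.ClientConnectorDNSError` from the
+3.10.10 notes above might be handled; the URL and the handler bodies are
+illustrative assumptions rather than upstream code, with the more specific DNS
+error caught before the broader :exc:`aiohttp.ClientConnectorError`::
+
+    import asyncio
+
+    import aiohttp
+
+    async def fetch(url: str) -> None:
+        async with aiohttp.ClientSession() as session:
+            try:
+                async with session.get(url) as resp:
+                    print(resp.status)
+            except aiohttp.ClientConnectorDNSError as exc:
+                # DNS resolution failed (new, more specific exception).
+                print("DNS lookup failed:", exc)
+            except aiohttp.ClientConnectorError as exc:
+                # Any other connection-level failure.
+                print("Connection failed:", exc)
+
+    asyncio.run(fetch("https://host.invalid/"))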
+ + + + +---- + + +3.10.6 (2024-09-24) +=================== + +Bug fixes +--------- + +- Added :exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` + will now throw this -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9137`. + + + +- Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8875`. + + + +- Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8967`. + + + +- Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. + + + *Related issues and pull requests on GitHub:* + :issue:`6732`. + + + +- Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8898`, :issue:`9267`. + + + +- Fixed an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8998`. + + + +- Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`6485`. + + + +- Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9108`. + + + +- Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8992`. + + + +- Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9140`. + + + +- Fixed Python parser chunked handling with multiple Transfer-Encoding values -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8823`. + + + +- Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8876`. + + + +- Stopped adding a default Content-Type header when response has no content -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8858`. + + + +- Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` + + + *Related issues and pull requests on GitHub:* + :issue:`6494`. + + + +- Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`6807`. + + + +- Implemented binding to IPv6 addresses in the pytest server fixture. + + + *Related issues and pull requests on GitHub:* + :issue:`4650`. + + + +- Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` + + Link-Local IPv6 addresses can now be handled by the Resolver correctly. + + + *Related issues and pull requests on GitHub:* + :issue:`9032`. + + + +- Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`5343`. + + + +- Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. + + + *Related issues and pull requests on GitHub:* + :issue:`7167`. + + + +- Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8768`. + + + +- Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8845`. + + + +- Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8878`. + + + +- Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8908`. + + + +- Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8990`. + + + +- Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8929`. + + + +- Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9018`. + + + +- Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9029`. + + + +- Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9030`. + + + +- Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9052`. + + + +- Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9063`. + + + +- Fixed badly encoded charset crashing when getting response text instead of falling back to charset detector. + + + *Related issues and pull requests on GitHub:* + :issue:`9160`. + + + +- Rejected `\n` in `reason` values to avoid sending broken HTTP messages -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9167`. + + + +- Changed :py:meth:`ClientResponse.raise_for_status() ` to only release the connection when invoked outside an ``async with`` context -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9239`. + + + + +Features +-------- + +- Improved type on ``params`` to match the underlying type allowed by ``yarl`` -- by :user:`lpetre`. + + + *Related issues and pull requests on GitHub:* + :issue:`8564`. + + + +- Declared Python 3.13 supported -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8748`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Improved middleware performance -- by :user:`bdraco`. 
+ + The ``set_current_app`` method was removed from ``UrlMappingMatchInfo`` because it is no longer used, and it was unlikely external caller would ever use it. + + + *Related issues and pull requests on GitHub:* + :issue:`9200`. + + + +- Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9267`. + + + + +Improved documentation +---------------------- + +- Clarified that ``GracefulExit`` needs to be handled in ``AppRunner`` and ``ServerRunner`` when using ``handle_signals=True``. -- by :user:`Daste745` + + + *Related issues and pull requests on GitHub:* + :issue:`4414`. + + + +- Clarified that auth parameter in ClientSession will persist and be included with any request to any origin, even during redirects to different origins. -- by :user:`MaximZemskov`. + + + *Related issues and pull requests on GitHub:* + :issue:`6764`. + + + +- Clarified which timeout exceptions happen on which timeouts -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8968`. + + + +- Updated ``ClientSession`` parameters to match current code -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8991`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Fixed ``test_client_session_timeout_zero`` to not require internet access -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9004`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8847`. + + + +- Exported ``aiohttp.TraceRequestHeadersSentParams`` -- by :user:`Hadock-is-ok`. + + + *Related issues and pull requests on GitHub:* + :issue:`8947`. + + + +- Avoided tracing overhead in the http writer when there are no active traces -- by user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9031`. + + + +- Improved performance of reify Cython implementation -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9054`. + + + +- Use :meth:`URL.extend_query() ` to extend query params (requires yarl 1.11.0+) -- by :user:`bdraco`. + + If yarl is older than 1.11.0, the previous slower hand rolled version will be used. + + + *Related issues and pull requests on GitHub:* + :issue:`9068`. + + + +- Improved performance of checking if a host is an IP Address -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9095`. + + + +- Significantly improved performance of middlewares -- by :user:`bdraco`. + + The construction of the middleware wrappers is now cached and is built once per handler instead of on every request. + + + *Related issues and pull requests on GitHub:* + :issue:`9158`, :issue:`9170`. + + + +- Improved performance of web requests -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9168`, :issue:`9169`, :issue:`9172`, :issue:`9174`, :issue:`9175`, :issue:`9241`. + + + +- Improved performance of starting web requests when there is no response prepare hook -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9173`. + + + +- Significantly improved performance of expiring cookies -- by :user:`bdraco`. + + Expiring cookies has been redesigned to use :mod:`heapq` instead of a linear search, to better scale. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9203`. + + + +- Significantly sped up filtering cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9204`. + + + + +---- + + +3.10.5 (2024-08-19) +========================= + +Bug fixes +--------- + +- Fixed :meth:`aiohttp.ClientResponse.json()` not setting ``status`` when :exc:`aiohttp.ContentTypeError` is raised -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8742`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the WebSocket reader -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8736`, :issue:`8747`. + + + + +---- + + +3.10.4 (2024-08-17) +=================== + +Bug fixes +--------- + +- Fixed decoding base64 chunk in BodyPartReader -- by :user:`hyzyla`. + + + *Related issues and pull requests on GitHub:* + :issue:`3867`. + + + +- Fixed a race closing the server-side WebSocket where the close code would not reach the client -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8680`. + + + +- Fixed unconsumed exceptions raised by the WebSocket heartbeat -- by :user:`bdraco`. + + If the heartbeat ping raised an exception, it would not be consumed and would be logged as an warning. + + + *Related issues and pull requests on GitHub:* + :issue:`8685`. + + + +- Fixed an edge case in the Python parser when chunk separators happen to align with network chunks -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8720`. + + + + +Improved documentation +---------------------- + +- Added ``aiohttp-apischema`` to supported libraries -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8700`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. + + This change is a followup to :issue:`8661` to make the same optimization for Python 3.12+ where the request is connected. + + + *Related issues and pull requests on GitHub:* + :issue:`8681`. + + + + +---- + + +3.10.3 (2024-08-10) +======================== + +Bug fixes +--------- + +- Fixed multipart reading when stream buffer splits the boundary over several read() calls -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8653`. + + + +- Fixed :py:class:`aiohttp.TCPConnector` doing blocking I/O in the event loop to create the ``SSLContext`` -- by :user:`bdraco`. + + The blocking I/O would only happen once per verify mode. However, it could cause the event loop to block for a long time if the ``SSLContext`` creation is slow, which is more likely during startup when the disk cache is not yet present. + + + *Related issues and pull requests on GitHub:* + :issue:`8672`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` when there is no timeout. -- by :user:`bdraco`. + + The timeout context manager is now avoided when there is no timeout as it accounted for up to 50% of the time spent in the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` methods. + + + *Related issues and pull requests on GitHub:* + :issue:`8660`. 
+ + + +- Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8661`. + + + +- Improved performance of HTTP keep-alive checks -- by :user:`bdraco`. + + Previously, when processing a request for a keep-alive connection, the keep-alive check would happen every second; the check is now rescheduled if it fires too early instead. + + + *Related issues and pull requests on GitHub:* + :issue:`8662`. + + + +- Improved performance of generating random WebSocket mask -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8667`. + + + + +---- + + +3.10.2 (2024-08-08) +=================== + +Bug fixes +--------- + +- Fixed server checks for circular symbolic links to be compatible with Python 3.13 -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8565`. + + + +- Fixed request body not being read when ignoring an Upgrade request -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8597`. + + + +- Fixed an edge case where shutdown would wait for timeout when the handler was already completed -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8611`. + + + +- Fixed connecting to ``npipe://``, ``tcp://``, and ``unix://`` urls -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8632`. + + + +- Fixed WebSocket ping tasks being prematurely garbage collected -- by :user:`bdraco`. + + There was a small risk that WebSocket ping tasks would be prematurely garbage collected because the event loop only holds a weak reference to the task. The garbage collection risk has been fixed by holding a strong reference to the task. Additionally, the task is now scheduled eagerly with Python 3.12+ to increase the chance it can be completed immediately and avoid having to hold any references to the task. + + + *Related issues and pull requests on GitHub:* + :issue:`8641`. + + + +- Fixed incorrectly following symlinks for compressed file variants -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8652`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Removed ``Request.wait_for_disconnection()``, which was mistakenly added briefly in 3.10.0 -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8636`. + + + + +Contributor-facing changes +-------------------------- + +- Fixed monkey patches for ``Path.stat()`` and ``Path.is_dir()`` for Python 3.13 compatibility -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8551`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved WebSocket performance when messages are sent or received frequently -- by :user:`bdraco`. + + The WebSocket heartbeat scheduling algorithm was improved to reduce the ``asyncio`` scheduling overhead by decreasing the number of ``asyncio.TimerHandle`` creations and cancellations. + + + *Related issues and pull requests on GitHub:* + :issue:`8608`. + + + +- Minor improvements to various type annotations -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8634`. 
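+
+The WebSocket ping-task fix in the 3.10.2 notes above relies on the standard
+asyncio pattern of keeping a strong reference to background tasks, since the
+event loop itself only holds a weak one. A general, illustrative sketch of that
+pattern (the ``spawn`` helper and ``background_tasks`` set are hypothetical
+names, not aiohttp internals)::
+
+    import asyncio
+    from typing import Any, Coroutine, Set
+
+    background_tasks: Set["asyncio.Task[Any]"] = set()
+
+    def spawn(coro: Coroutine[Any, Any, Any]) -> "asyncio.Task[Any]":
+        # Hold a strong reference until the task finishes so it cannot be
+        # garbage collected mid-flight; drop it once the task is done.
+        task = asyncio.create_task(coro)
+        background_tasks.add(task)
+        task.add_done_callback(background_tasks.discard)
+        return task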
+ + + + +---- + + +3.10.1 (2024-08-03) +======================== + +Bug fixes +--------- + +- Fixed WebSocket server heartbeat timeout logic to terminate :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. + + When a WebSocket pong message was not received, the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. This change causes ``_pong_not_received`` to feed the ``reader`` an error message, causing pending :py:meth:`~aiohttp.ClientWebSocketResponse.receive` to terminate and return the error message. The error message contains the exception :py:class:`~aiohttp.ServerTimeoutError`. + + + *Related issues and pull requests on GitHub:* + :issue:`8540`. + + + +- Fixed url dispatcher index not matching when a variable is preceded by a fixed string after a slash -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8566`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Creating :py:class:`aiohttp.TCPConnector`, :py:class:`aiohttp.ClientSession`, :py:class:`~aiohttp.resolver.ThreadedResolver` :py:class:`aiohttp.web.Server`, or :py:class:`aiohttp.CookieJar` instances without a running event loop now raises a :exc:`RuntimeError` -- by :user:`asvetlov`. + + Creating these objects without a running event loop was deprecated in :issue:`3372` which was released in version 3.5.0. + + This change first appeared in version 3.10.0 as :issue:`6378`. + + + *Related issues and pull requests on GitHub:* + :issue:`8555`, :issue:`8583`. + + + + +---- + + +3.10.0 (2024-07-30) +======================== + +Bug fixes +--------- + +- Fixed server response headers for ``Content-Type`` and ``Content-Encoding`` for + static compressed files -- by :user:`steverep`. + + Server will now respond with a ``Content-Type`` appropriate for the compressed + file (e.g. ``"application/gzip"``), and omit the ``Content-Encoding`` header. + Users should expect that most clients will no longer decompress such responses + by default. + + + *Related issues and pull requests on GitHub:* + :issue:`4462`. + + + +- Fixed duplicate cookie expiration calls in the CookieJar implementation + + + *Related issues and pull requests on GitHub:* + :issue:`7784`. + + + +- Adjusted ``FileResponse`` to check file existence and access when preparing the response -- by :user:`steverep`. + + The :py:class:`~aiohttp.web.FileResponse` class was modified to respond with + 403 Forbidden or 404 Not Found as appropriate. Previously, it would cause a + server error if the path did not exist or could not be accessed. Checks for + existence, non-regular files, and permissions were expected to be done in the + route handler. For static routes, this now permits a compressed file to exist + without its uncompressed variant and still be served. In addition, this + changes the response status for files without read permission to 403, and for + non-regular files from 404 to 403 for consistency. + + + *Related issues and pull requests on GitHub:* + :issue:`8182`. + + + +- Fixed ``AsyncResolver`` to match ``ThreadedResolver`` behavior + -- by :user:`bdraco`. + + On system with IPv6 support, the :py:class:`~aiohttp.resolver.AsyncResolver` would not fallback + to providing A records when AAAA records were not available. 
+ Additionally, unlike the :py:class:`~aiohttp.resolver.ThreadedResolver`, the :py:class:`~aiohttp.resolver.AsyncResolver` + did not handle link-local addresses correctly. + + This change makes the behavior consistent with the :py:class:`~aiohttp.resolver.ThreadedResolver`. + + + *Related issues and pull requests on GitHub:* + :issue:`8270`. + + + +- Fixed ``ws_connect`` not respecting `receive_timeout`` on WS(S) connection. + -- by :user:`arcivanov`. + + + *Related issues and pull requests on GitHub:* + :issue:`8444`. + + + +- Removed blocking I/O in the event loop for static resources and refactored + exception handling -- by :user:`steverep`. + + File system calls when handling requests for static routes were moved to a + separate thread to potentially improve performance. Exception handling + was tightened in order to only return 403 Forbidden or 404 Not Found responses + for expected scenarios; 500 Internal Server Error would be returned for any + unknown errors. + + + *Related issues and pull requests on GitHub:* + :issue:`8507`. + + + + +Features +-------- + +- Added a Request.wait_for_disconnection() method, as means of allowing request handlers to be notified of premature client disconnections. + + + *Related issues and pull requests on GitHub:* + :issue:`2492`. + + + +- Added 5 new exceptions: :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, + :py:exc:`~aiohttp.NonHttpUrlClientError`, :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, + :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` + + :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` + are raised instead of :py:exc:`ValueError` or :py:exc:`~aiohttp.InvalidURL` when the redirect URL is invalid. Classes + :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, + :py:exc:`~aiohttp.NonHttpUrlClientError` are base for them. + + The :py:exc:`~aiohttp.InvalidURL` now exposes a ``description`` property with the text explanation of the error details. + + -- by :user:`setla`, :user:`AraHaan`, and :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`2507`, :issue:`3315`, :issue:`6722`, :issue:`8481`, :issue:`8482`. + + + +- Added a feature to retry closed connections automatically for idempotent methods. -- by :user:`Dreamsorcerer` + + + *Related issues and pull requests on GitHub:* + :issue:`7297`. + + + +- Implemented filter_cookies() with domain-matching and path-matching on the keys, instead of testing every single cookie. + This may break existing cookies that have been saved with `CookieJar.save()`. Cookies can be migrated with this script:: + + import pickle + with file_path.open("rb") as f: + cookies = pickle.load(f) + + morsels = [(name, m) for c in cookies.values() for name, m in c.items()] + cookies.clear() + for name, m in morsels: + cookies[(m["domain"], m["path"].rstrip("/"))][name] = m + + with file_path.open("wb") as f: + pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) + + + *Related issues and pull requests on GitHub:* + :issue:`7583`, :issue:`8535`. + + + +- Separated connection and socket timeout errors, from ServerTimeoutError. + + + *Related issues and pull requests on GitHub:* + :issue:`7801`. + + + +- Implemented happy eyeballs + + + *Related issues and pull requests on GitHub:* + :issue:`7954`. + + + +- Added server capability to check for static files with Brotli compression via a ``.br`` extension -- by :user:`steverep`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`8062`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. + In 3.10 we've changed this logic to only wait on request handlers. This means that it's + important for developers to correctly handle the lifecycle of background tasks using a + library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then + it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with + :func:`aiojobs.aiohttp.shield`. + + Please read the updated documentation on these points: \ + https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown \ + https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation + + -- by :user:`Dreamsorcerer` + + + *Related issues and pull requests on GitHub:* + :issue:`8495`. + + + + +Improved documentation +---------------------- + +- Added documentation for ``aiohttp.web.FileResponse``. + + + *Related issues and pull requests on GitHub:* + :issue:`3958`. + + + +- Improved the docs for the `ssl` params. + + + *Related issues and pull requests on GitHub:* + :issue:`8403`. + + + + +Contributor-facing changes +-------------------------- + +- Enabled HTTP parser tests originally intended for 3.9.2 release -- by :user:`pajod`. + + + *Related issues and pull requests on GitHub:* + :issue:`8088`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved URL handler resolution time by indexing resources in the UrlDispatcher. + For applications with a large number of handlers, this should increase performance significantly. + -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`7829`. + + + +- Added `nacl_middleware `_ to the list of middlewares in the third party section of the documentation. + + + *Related issues and pull requests on GitHub:* + :issue:`8346`. + + + +- Minor improvements to static typing -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8364`. + + + +- Added a 3.11-specific overloads to ``ClientSession`` -- by :user:`max-muoto`. + + + *Related issues and pull requests on GitHub:* + :issue:`8463`. + + + +- Simplified path checks for ``UrlDispatcher.add_static()`` method -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8491`. + + + +- Avoided creating a future on every websocket receive -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8498`. + + + +- Updated identity checks for all ``WSMsgType`` type compares -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8501`. + + + +- When using Python 3.12 or later, the writer is no longer scheduled on the event loop if it can finish synchronously. Avoiding event loop scheduling reduces latency and improves performance. -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8510`. + + + +- Restored :py:class:`~aiohttp.resolver.AsyncResolver` to be the default resolver. -- by :user:`bdraco`. + + :py:class:`~aiohttp.resolver.AsyncResolver` was disabled by default because + of IPv6 compatibility issues. These issues have been resolved and + :py:class:`~aiohttp.resolver.AsyncResolver` is again now the default resolver. + + + *Related issues and pull requests on GitHub:* + :issue:`8522`. 
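+
+An illustrative sketch of the finer-grained timeout handling enabled by the
+3.10.0 split of connection and socket timeout errors listed under Features
+above; the timeout values and URL are arbitrary assumptions::
+
+    import asyncio
+
+    import aiohttp
+
+    async def fetch(url: str) -> str:
+        # Arbitrary example values; tune per application.
+        timeout = aiohttp.ClientTimeout(total=30, sock_connect=5, sock_read=10)
+        async with aiohttp.ClientSession(timeout=timeout) as session:
+            try:
+                async with session.get(url) as resp:
+                    return await resp.text()
+            except aiohttp.ConnectionTimeoutError:
+                # Establishing the connection took longer than sock_connect.
+                raise
+            except aiohttp.SocketTimeoutError:
+                # Reading from an established connection exceeded sock_read.
+                raise
+
+    asyncio.run(fetch("https://example.com/"))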
+ + + + +---- + + 3.9.5 (2024-04-16) ================== diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 277171a..3fb6686 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -46,6 +46,7 @@ Anes Abismail Antoine Pietri Anton Kasyanov Anton Zhdan-Pushkin +Arcadiy Ivanov Arseny Timoniq Artem Yushkovskiy Arthur Darcet @@ -59,6 +60,7 @@ Bob Haddleton Boris Feld Boyi Chen Brett Cannon +Brett Higgins Brian Bouterse Brian C. Lane Brian Muller @@ -73,6 +75,7 @@ Chih-Yuan Chen Chris AtLee Chris Laws Chris Moore +Chris Shucksmith Christopher Schmitt Claudiu Popa Colin Dunklau @@ -218,6 +221,7 @@ Manuel Miranda Marat Sharafutdinov Marc Mueller Marco Paolini +Marcus Stojcevich Mariano Anaya Mariusz Masztalerczuk Marko Kohtala @@ -269,9 +273,11 @@ Pawel Kowalski Pawel Miech Pepe Osca Philipp A. +Pierre-Louis Peeters Pieter van Beek Qiao Han Rafael Viotti +Rahul Nahata Raphael Bialon Raúl Cumplido Required Field @@ -351,6 +357,8 @@ William Grzybowski William S. Wilson Ong wouter bolsterlee +Xavier Halloran +Xiang Li Yang Zhou Yannick Koechlin Yannick Péroux @@ -367,5 +375,6 @@ Yuvi Panda Zainab Lawal Zeal Wierslee Zlatan Sičanica +Łukasz Setla Марк Коренберг Семён Марьясин diff --git a/PKG-INFO b/PKG-INFO index 0da0733..9fee64d 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: aiohttp -Version: 3.9.5 +Version: 3.10.10 Summary: Async http client/server framework (asyncio) Home-page: https://github.com/aio-libs/aiohttp Maintainer: aiohttp team @@ -28,18 +28,20 @@ Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 Classifier: Topic :: Internet :: WWW/HTTP Requires-Python: >=3.8 Description-Content-Type: text/x-rst License-File: LICENSE.txt +Requires-Dist: aiohappyeyeballs>=2.3.0 Requires-Dist: aiosignal>=1.1.2 -Requires-Dist: attrs>=17.3.0 Requires-Dist: async-timeout<5.0,>=4.0; python_version < "3.11" +Requires-Dist: attrs>=17.3.0 Requires-Dist: frozenlist>=1.1.1 Requires-Dist: multidict<7.0,>=4.5 -Requires-Dist: yarl<2.0,>=1.0 +Requires-Dist: yarl<2.0,>=1.12.0 Provides-Extra: speedups -Requires-Dist: aiodns; (sys_platform == "linux" or sys_platform == "darwin") and extra == "speedups" +Requires-Dist: aiodns>=3.2.0; (sys_platform == "linux" or sys_platform == "darwin") and extra == "speedups" Requires-Dist: Brotli; platform_python_implementation == "CPython" and extra == "speedups" Requires-Dist: brotlicffi; platform_python_implementation != "CPython" and extra == "speedups" @@ -193,7 +195,7 @@ Communication channels *aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions -*gitter chat* https://gitter.im/aio-libs/Lobby +*Matrix*: `#aio-libs:matrix.org `_ We support `Stack Overflow `_. @@ -202,7 +204,6 @@ Please add *aiohttp* tag to your question there. Requirements ============ -- async-timeout_ - attrs_ - multidict_ - yarl_ diff --git a/README.rst b/README.rst index 90b7f71..470ced9 100644 --- a/README.rst +++ b/README.rst @@ -148,7 +148,7 @@ Communication channels *aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions -*gitter chat* https://gitter.im/aio-libs/Lobby +*Matrix*: `#aio-libs:matrix.org `_ We support `Stack Overflow `_. @@ -157,7 +157,6 @@ Please add *aiohttp* tag to your question there. 
Requirements ============ -- async-timeout_ - attrs_ - multidict_ - yarl_ diff --git a/aiohttp.egg-info/PKG-INFO b/aiohttp.egg-info/PKG-INFO index 0da0733..9fee64d 100644 --- a/aiohttp.egg-info/PKG-INFO +++ b/aiohttp.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: aiohttp -Version: 3.9.5 +Version: 3.10.10 Summary: Async http client/server framework (asyncio) Home-page: https://github.com/aio-libs/aiohttp Maintainer: aiohttp team @@ -28,18 +28,20 @@ Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 Classifier: Topic :: Internet :: WWW/HTTP Requires-Python: >=3.8 Description-Content-Type: text/x-rst License-File: LICENSE.txt +Requires-Dist: aiohappyeyeballs>=2.3.0 Requires-Dist: aiosignal>=1.1.2 -Requires-Dist: attrs>=17.3.0 Requires-Dist: async-timeout<5.0,>=4.0; python_version < "3.11" +Requires-Dist: attrs>=17.3.0 Requires-Dist: frozenlist>=1.1.1 Requires-Dist: multidict<7.0,>=4.5 -Requires-Dist: yarl<2.0,>=1.0 +Requires-Dist: yarl<2.0,>=1.12.0 Provides-Extra: speedups -Requires-Dist: aiodns; (sys_platform == "linux" or sys_platform == "darwin") and extra == "speedups" +Requires-Dist: aiodns>=3.2.0; (sys_platform == "linux" or sys_platform == "darwin") and extra == "speedups" Requires-Dist: Brotli; platform_python_implementation == "CPython" and extra == "speedups" Requires-Dist: brotlicffi; platform_python_implementation != "CPython" and extra == "speedups" @@ -193,7 +195,7 @@ Communication channels *aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions -*gitter chat* https://gitter.im/aio-libs/Lobby +*Matrix*: `#aio-libs:matrix.org `_ We support `Stack Overflow `_. @@ -202,7 +204,6 @@ Please add *aiohttp* tag to your question there. 
Requirements ============ -- async-timeout_ - attrs_ - multidict_ - yarl_ diff --git a/aiohttp.egg-info/SOURCES.txt b/aiohttp.egg-info/SOURCES.txt index bb45ab4..ed0688f 100644 --- a/aiohttp.egg-info/SOURCES.txt +++ b/aiohttp.egg-info/SOURCES.txt @@ -40,7 +40,6 @@ aiohttp/http_exceptions.py aiohttp/http_parser.py aiohttp/http_websocket.py aiohttp/http_writer.py -aiohttp/locks.py aiohttp/log.py aiohttp/multipart.py aiohttp/payload.py @@ -177,8 +176,6 @@ requirements/runtime-deps.txt requirements/sync-direct-runtime-deps.py requirements/test.in requirements/test.txt -requirements/typing-extensions.in -requirements/typing-extensions.txt requirements/.hash/cython.txt.hash tests/aiohttp.jpg tests/aiohttp.png @@ -208,7 +205,6 @@ tests/test_http_exceptions.py tests/test_http_parser.py tests/test_http_writer.py tests/test_imports.py -tests/test_locks.py tests/test_loop.py tests/test_multipart.py tests/test_multipart_helpers.py diff --git a/aiohttp.egg-info/requires.txt b/aiohttp.egg-info/requires.txt index 7f5ecb1..816c844 100644 --- a/aiohttp.egg-info/requires.txt +++ b/aiohttp.egg-info/requires.txt @@ -1,8 +1,9 @@ +aiohappyeyeballs>=2.3.0 aiosignal>=1.1.2 attrs>=17.3.0 frozenlist>=1.1.1 multidict<7.0,>=4.5 -yarl<2.0,>=1.0 +yarl<2.0,>=1.12.0 [:python_version < "3.11"] async-timeout<5.0,>=4.0 @@ -16,4 +17,4 @@ brotlicffi Brotli [speedups:sys_platform == "linux" or sys_platform == "darwin"] -aiodns +aiodns>=3.2.0 diff --git a/aiohttp/.hash/_helpers.pyx.hash b/aiohttp/.hash/_helpers.pyx.hash index 8f38727..8da6fee 100644 --- a/aiohttp/.hash/_helpers.pyx.hash +++ b/aiohttp/.hash/_helpers.pyx.hash @@ -1 +1 @@ -5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx +19d98f08efd55a40c99b2fc4c8341da7ee5cc143b1a59181014c3f43a3e95423 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx diff --git a/aiohttp/.hash/_http_parser.pyx.hash b/aiohttp/.hash/_http_parser.pyx.hash index 215a0da..bfd0ede 100644 --- a/aiohttp/.hash/_http_parser.pyx.hash +++ b/aiohttp/.hash/_http_parser.pyx.hash @@ -1 +1 @@ -abaf11ab4e8ca56f90c0bc6b884de120999620a73895515a587537725b077786 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx +e2d962e51a183b6e2723c1cb97b9f11c795bedc7093ae1eb038a7040dd8f4d70 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e82e790..2be76a4 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,40 +1,49 @@ -__version__ = "3.9.5" +__version__ = "3.10.10" from typing import TYPE_CHECKING, Tuple from . 
import hdrs as hdrs from .client import ( - BaseConnector as BaseConnector, - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - ClientResponseError as ClientResponseError, - ClientSession as ClientSession, - ClientSSLError as ClientSSLError, - ClientTimeout as ClientTimeout, - ClientWebSocketResponse as ClientWebSocketResponse, - ContentTypeError as ContentTypeError, - Fingerprint as Fingerprint, - InvalidURL as InvalidURL, - NamedPipeConnector as NamedPipeConnector, - RequestInfo as RequestInfo, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TCPConnector as TCPConnector, - TooManyRedirects as TooManyRedirects, - UnixConnector as UnixConnector, - WSServerHandshakeError as WSServerHandshakeError, - request as request, + BaseConnector, + ClientConnectionError, + ClientConnectionResetError, + ClientConnectorCertificateError, + ClientConnectorDNSError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientRequest, + ClientResponse, + ClientResponseError, + ClientSession, + ClientSSLError, + ClientTimeout, + ClientWebSocketResponse, + ConnectionTimeoutError, + ContentTypeError, + Fingerprint, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NamedPipeConnector, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, + RequestInfo, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TCPConnector, + TooManyRedirects, + UnixConnector, + WSServerHandshakeError, + request, ) from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar from .formdata import FormData as FormData @@ -99,6 +108,7 @@ TraceRequestChunkSentParams as TraceRequestChunkSentParams, TraceRequestEndParams as TraceRequestEndParams, TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestHeadersSentParams as TraceRequestHeadersSentParams, TraceRequestRedirectParams as TraceRequestRedirectParams, TraceRequestStartParams as TraceRequestStartParams, TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, @@ -116,7 +126,9 @@ # client "BaseConnector", "ClientConnectionError", + "ClientConnectionResetError", "ClientConnectorCertificateError", + "ClientConnectorDNSError", "ClientConnectorError", "ClientConnectorSSLError", "ClientError", @@ -131,14 +143,21 @@ "ClientSession", "ClientTimeout", "ClientWebSocketResponse", + "ConnectionTimeoutError", "ContentTypeError", "Fingerprint", "InvalidURL", + "InvalidUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlClientError", + "NonHttpUrlRedirectClientError", + "RedirectClientError", "RequestInfo", "ServerConnectionError", "ServerDisconnectedError", "ServerFingerprintMismatch", "ServerTimeoutError", + "SocketTimeoutError", "TCPConnector", "TooManyRedirects", "UnixConnector", @@ -210,6 +229,7 
@@ "TraceRequestChunkSentParams", "TraceRequestEndParams", "TraceRequestExceptionParams", + "TraceRequestHeadersSentParams", "TraceRequestRedirectParams", "TraceRequestStartParams", "TraceResponseChunkReceivedParams", diff --git a/aiohttp/_helpers.c b/aiohttp/_helpers.c index 89aaa55..a4149f3 100644 --- a/aiohttp/_helpers.c +++ b/aiohttp/_helpers.c @@ -1,4 +1,4 @@ -/* Generated by Cython 3.0.5 */ +/* Generated by Cython 3.0.10 */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN @@ -24,10 +24,10 @@ #else #define __PYX_EXTRA_ABI_MODULE_NAME "" #endif -#define CYTHON_ABI "3_0_5" __PYX_EXTRA_ABI_MODULE_NAME +#define CYTHON_ABI "3_0_10" __PYX_EXTRA_ABI_MODULE_NAME #define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI #define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." -#define CYTHON_HEX_VERSION 0x030005F0 +#define CYTHON_HEX_VERSION 0x03000AF0 #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof @@ -119,6 +119,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(PYPY_VERSION) #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_CPYTHON 0 @@ -180,6 +182,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(CYTHON_LIMITED_API) #ifdef Py_LIMITED_API #undef __PYX_LIMITED_VERSION_HEX @@ -241,7 +245,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif -#elif defined(PY_NOGIL) + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 @@ -250,11 +256,17 @@ #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #ifndef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 1 #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS @@ -262,8 +274,6 @@ #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif @@ -275,11 +285,22 @@ #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif @@ -287,6 +308,12 @@ #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 1 @@ -377,6 +404,9 @@ #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) @@ -569,18 +599,19 @@ PyObject *exception_table = NULL; PyObject *types_module=NULL, *code_type=NULL, *result=NULL; #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; // borrowed - #endif + PyObject *version_info; PyObject *py_minor_version = NULL; + #endif long minor_version = 0; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; // we don't yet need to distinguish between versions > 11 + minor_version = 11; #else if (!(version_info = PySys_GetObject("version_info"))) goto end; if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); if (minor_version == -1 && PyErr_Occurred()) goto end; #endif if (!(types_module = PyImport_ImportModule("types"))) goto end; @@ -601,7 +632,6 @@ Py_XDECREF(code_type); Py_XDECREF(exception_table); Py_XDECREF(types_module); - Py_XDECREF(py_minor_version); if (type) { PyErr_Restore(type, value, traceback); } @@ -634,7 +664,7 @@ PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); // we don't have access to __pyx_empty_bytes here + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); if (!empty_bytes) return NULL; result = #if PY_VERSION_HEX >= 0x030C0000 @@ -720,8 +750,13 @@ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames); #else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif #endif #if CYTHON_METH_FASTCALL #define __Pyx_METH_FASTCALL METH_FASTCALL @@ -929,7 +964,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #endif #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 #define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE(obj);\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ PyObject_GC_Del(obj);\ Py_DECREF(type);\ @@ -1073,7 +1108,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) #endif -#if PY_VERSION_HEX >= 0x030d00A1 +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) #else static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { @@ -1160,7 +1195,7 @@ static CYTHON_INLINE float __PYX_NAN() { #endif #define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = 
lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } @@ -1263,24 +1298,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const wchar_t *u) -{ - const wchar_t *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#else -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) -{ - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#endif #define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) @@ -1330,7 +1348,7 @@ static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #endif typedef Py_ssize_t __Pyx_compact_pylong; typedef size_t __Pyx_compact_upylong; - #else // Py < 3.12 + #else #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) @@ -1465,7 +1483,9 @@ static const char *__pyx_f[] = { /*--- Type declarations ---*/ struct __pyx_obj_7aiohttp_8_helpers_reify; -/* "aiohttp/_helpers.pyx":1 +/* "aiohttp/_helpers.pyx":4 + * cdef _sentinel = object() + * * cdef class reify: # <<<<<<<<<<<<<< * """Use as a class method decorator. 
It operates almost exactly like * the Python `@property` decorator, but it puts the result of the @@ -1649,8 +1669,8 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) #else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg // no-op - #define __Pyx_Arg_XDECREF_VARARGS(arg) // no-op - arg is borrowed + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define __Pyx_Arg_XDECREF_VARARGS(arg) #endif #define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) #define __Pyx_KwValues_VARARGS(args, nargs) NULL @@ -1662,12 +1682,13 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); #else #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg // no-op, __Pyx_Arg_FASTCALL is direct and this needs - #define __Pyx_Arg_XDECREF_FASTCALL(arg) // no-op - arg was returned from array + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) #else #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS @@ -1698,27 +1719,43 @@ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* dict_getitem_default.proto */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod1.proto */ +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/* CallUnboundCMethod2.proto */ +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); +#else +#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) +#endif /* PyFunctionFastCall.proto */ #if CYTHON_FAST_PYCALL @@ -1755,13 +1792,6 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, #endif #endif -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - /* PyObjectCallMethO.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); @@ -1771,40 +1801,6 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject #define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) 
static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* ObjectGetItem.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *key); -#else -#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) -#endif - -/* GetTopmostException.proto */ -#if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE -static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); -#endif - -/* SaveResetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -#else -#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) -#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) -#endif - -/* GetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); -#endif - /* RaiseException.proto */ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); @@ -1861,9 +1857,6 @@ static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_ve static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); #endif -/* RaiseUnexpectedTypeError.proto */ -static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); - /* PySequenceContains.proto */ static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { int result = PySequence_Contains(seq, item); @@ -1876,6 +1869,28 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /* ImportFrom.proto */ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + /* GetAttr.proto */ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); @@ -1897,6 +1912,9 @@ static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject /* PyObjectCallNoArg.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + /* PyObjectGetMethod.proto */ static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); @@ -2020,7 +2038,7 @@ typedef struct { #endif void *defaults; int defaults_pyobjects; - size_t defaults_size; // used by FusedFunction for copying defaults + size_t defaults_size; int flags; PyObject *defaults_tuple; PyObject *defaults_kwdict; @@ -2152,6 +2170,7 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /* #### Code section: module_declarations ### */ /* Module declarations from "aiohttp._helpers" */ +static PyObject *__pyx_v_7aiohttp_8_helpers__sentinel = 0; static PyObject *__pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(struct __pyx_obj_7aiohttp_8_helpers_reify *, PyObject *); /*proto*/ /* #### Code section: typeinfo ### */ /* #### Code section: before_global_var ### */ @@ -2161,13 +2180,14 @@ int __pyx_module_is_main_aiohttp___helpers = 0; /* Implementation of "aiohttp._helpers" */ /* #### Code section: global_var ### */ -static PyObject *__pyx_builtin_KeyError; +static PyObject *__pyx_builtin_object; static PyObject *__pyx_builtin_AttributeError; /* #### Code section: string_decls ### */ static const char __pyx_k__3[] = "."; static const char __pyx_k_gc[] = "gc"; static const char __pyx_k__10[] = "?"; static const char __pyx_k_doc[] = "__doc__"; +static const char __pyx_k_get[] = "get"; static const char __pyx_k_new[] = "__new__"; static const char __pyx_k_dict[] = "__dict__"; static const char __pyx_k_main[] = "__main__"; @@ -2180,12 +2200,12 @@ static const char __pyx_k_state[] = "state"; static const char __pyx_k_dict_2[] = "_dict"; static const char __pyx_k_enable[] = "enable"; static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_object[] = "object"; static const char __pyx_k_pickle[] = "pickle"; static const char __pyx_k_reduce[] = "__reduce__"; static const char __pyx_k_update[] = "update"; static const char __pyx_k_disable[] = 
"disable"; static const char __pyx_k_wrapped[] = "wrapped"; -static const char __pyx_k_KeyError[] = "KeyError"; static const char __pyx_k_getstate[] = "__getstate__"; static const char __pyx_k_pyx_type[] = "__pyx_type"; static const char __pyx_k_setstate[] = "__setstate__"; @@ -2198,7 +2218,6 @@ static const char __pyx_k_is_coroutine[] = "_is_coroutine"; static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; static const char __pyx_k_stringsource[] = ""; static const char __pyx_k_use_setstate[] = "use_setstate"; -static const char __pyx_k_class_getitem[] = "__class_getitem__"; static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; static const char __pyx_k_AttributeError[] = "AttributeError"; static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; @@ -2220,6 +2239,7 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(struct __ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ static PyObject *__pyx_tp_new_7aiohttp_8_helpers_reify(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_get = {0, 0, 0, 0, 0}; /* #### Code section: late_includes ### */ /* #### Code section: module_state ### */ typedef struct { @@ -2253,14 +2273,12 @@ typedef struct { PyTypeObject *__pyx_ptype_7aiohttp_8_helpers_reify; PyObject *__pyx_n_s_AttributeError; PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; - PyObject *__pyx_n_s_KeyError; PyObject *__pyx_n_s_PickleError; PyObject *__pyx_n_s__10; PyObject *__pyx_kp_u__3; PyObject *__pyx_n_s_aiohttp__helpers; PyObject *__pyx_n_s_asyncio_coroutines; PyObject *__pyx_n_s_cache; - PyObject *__pyx_n_s_class_getitem; PyObject *__pyx_n_s_cline_in_traceback; PyObject *__pyx_n_s_dict; PyObject *__pyx_n_s_dict_2; @@ -2268,6 +2286,7 @@ typedef struct { PyObject *__pyx_n_s_doc; PyObject *__pyx_kp_u_enable; PyObject *__pyx_kp_u_gc; + PyObject *__pyx_n_s_get; PyObject *__pyx_n_s_getstate; PyObject *__pyx_n_s_import; PyObject *__pyx_n_s_is_coroutine; @@ -2275,6 +2294,7 @@ typedef struct { PyObject *__pyx_n_s_main; PyObject *__pyx_n_s_name; PyObject *__pyx_n_s_new; + PyObject *__pyx_n_s_object; PyObject *__pyx_n_s_pickle; PyObject *__pyx_n_s_pyx_PickleError; PyObject *__pyx_n_s_pyx_checksum; @@ -2355,14 +2375,12 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_type_7aiohttp_8_helpers_reify); Py_CLEAR(clear_module_state->__pyx_n_s_AttributeError); Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_CLEAR(clear_module_state->__pyx_n_s_KeyError); Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); Py_CLEAR(clear_module_state->__pyx_n_s__10); Py_CLEAR(clear_module_state->__pyx_kp_u__3); Py_CLEAR(clear_module_state->__pyx_n_s_aiohttp__helpers); Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); Py_CLEAR(clear_module_state->__pyx_n_s_cache); - Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); Py_CLEAR(clear_module_state->__pyx_n_s_dict); Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); @@ -2370,6 +2388,7 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_n_s_doc); 
Py_CLEAR(clear_module_state->__pyx_kp_u_enable); Py_CLEAR(clear_module_state->__pyx_kp_u_gc); + Py_CLEAR(clear_module_state->__pyx_n_s_get); Py_CLEAR(clear_module_state->__pyx_n_s_getstate); Py_CLEAR(clear_module_state->__pyx_n_s_import); Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); @@ -2377,6 +2396,7 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_n_s_main); Py_CLEAR(clear_module_state->__pyx_n_s_name); Py_CLEAR(clear_module_state->__pyx_n_s_new); + Py_CLEAR(clear_module_state->__pyx_n_s_object); Py_CLEAR(clear_module_state->__pyx_n_s_pickle); Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); @@ -2435,14 +2455,12 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_type_7aiohttp_8_helpers_reify); Py_VISIT(traverse_module_state->__pyx_n_s_AttributeError); Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_VISIT(traverse_module_state->__pyx_n_s_KeyError); Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); Py_VISIT(traverse_module_state->__pyx_n_s__10); Py_VISIT(traverse_module_state->__pyx_kp_u__3); Py_VISIT(traverse_module_state->__pyx_n_s_aiohttp__helpers); Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); Py_VISIT(traverse_module_state->__pyx_n_s_cache); - Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); Py_VISIT(traverse_module_state->__pyx_n_s_dict); Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); @@ -2450,6 +2468,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_n_s_doc); Py_VISIT(traverse_module_state->__pyx_kp_u_enable); Py_VISIT(traverse_module_state->__pyx_kp_u_gc); + Py_VISIT(traverse_module_state->__pyx_n_s_get); Py_VISIT(traverse_module_state->__pyx_n_s_getstate); Py_VISIT(traverse_module_state->__pyx_n_s_import); Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); @@ -2457,6 +2476,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_n_s_main); Py_VISIT(traverse_module_state->__pyx_n_s_name); Py_VISIT(traverse_module_state->__pyx_n_s_new); + Py_VISIT(traverse_module_state->__pyx_n_s_object); Py_VISIT(traverse_module_state->__pyx_n_s_pickle); Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); @@ -2525,14 +2545,12 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_ptype_7aiohttp_8_helpers_reify __pyx_mstate_global->__pyx_ptype_7aiohttp_8_helpers_reify #define __pyx_n_s_AttributeError __pyx_mstate_global->__pyx_n_s_AttributeError #define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 -#define __pyx_n_s_KeyError __pyx_mstate_global->__pyx_n_s_KeyError #define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError #define __pyx_n_s__10 __pyx_mstate_global->__pyx_n_s__10 #define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 #define __pyx_n_s_aiohttp__helpers __pyx_mstate_global->__pyx_n_s_aiohttp__helpers #define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines #define __pyx_n_s_cache __pyx_mstate_global->__pyx_n_s_cache -#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem #define __pyx_n_s_cline_in_traceback 
__pyx_mstate_global->__pyx_n_s_cline_in_traceback #define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict #define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 @@ -2540,6 +2558,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_n_s_doc __pyx_mstate_global->__pyx_n_s_doc #define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable #define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc +#define __pyx_n_s_get __pyx_mstate_global->__pyx_n_s_get #define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate #define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import #define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine @@ -2547,6 +2566,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main #define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name #define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new +#define __pyx_n_s_object __pyx_mstate_global->__pyx_n_s_object #define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle #define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError #define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum @@ -2583,7 +2603,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 /* #### Code section: module_code ### */ -/* "aiohttp/_helpers.pyx":13 +/* "aiohttp/_helpers.pyx":16 * cdef object name * * def __init__(self, wrapped): # <<<<<<<<<<<<<< @@ -2627,12 +2647,12 @@ static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, (void)__Pyx_Arg_NewRef_VARARGS(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 13, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 16, __pyx_L3_error) else goto __pyx_L5_argtuple_error; } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 13, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 16, __pyx_L3_error) } } else if (unlikely(__pyx_nargs != 1)) { goto __pyx_L5_argtuple_error; @@ -2643,7 +2663,7 @@ static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 13, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 16, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -2679,7 +2699,7 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__init__", 1); - /* "aiohttp/_helpers.pyx":14 + /* "aiohttp/_helpers.pyx":17 * * def __init__(self, wrapped): * self.wrapped = wrapped # <<<<<<<<<<<<<< @@ -2692,14 +2712,14 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp __Pyx_DECREF(__pyx_v_self->wrapped); __pyx_v_self->wrapped = __pyx_v_wrapped; - /* "aiohttp/_helpers.pyx":15 + /* "aiohttp/_helpers.pyx":18 * def __init__(self, wrapped): * self.wrapped = wrapped * self.name = wrapped.__name__ # <<<<<<<<<<<<<< * * @property */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_wrapped, 
__pyx_n_s_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 15, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_wrapped, __pyx_n_s_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->name); @@ -2707,7 +2727,7 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp __pyx_v_self->name = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_helpers.pyx":13 + /* "aiohttp/_helpers.pyx":16 * cdef object name * * def __init__(self, wrapped): # <<<<<<<<<<<<<< @@ -2727,7 +2747,7 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp return __pyx_r; } -/* "aiohttp/_helpers.pyx":17 +/* "aiohttp/_helpers.pyx":20 * self.name = wrapped.__name__ * * @property # <<<<<<<<<<<<<< @@ -2759,7 +2779,7 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __py int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__get__", 1); - /* "aiohttp/_helpers.pyx":19 + /* "aiohttp/_helpers.pyx":22 * @property * def __doc__(self): * return self.wrapped.__doc__ # <<<<<<<<<<<<<< @@ -2767,13 +2787,13 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __py * def __get__(self, inst, owner): */ __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->wrapped, __pyx_n_s_doc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->wrapped, __pyx_n_s_doc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; - /* "aiohttp/_helpers.pyx":17 + /* "aiohttp/_helpers.pyx":20 * self.name = wrapped.__name__ * * @property # <<<<<<<<<<<<<< @@ -2792,12 +2812,12 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __py return __pyx_r; } -/* "aiohttp/_helpers.pyx":21 +/* "aiohttp/_helpers.pyx":24 * return self.wrapped.__doc__ * * def __get__(self, inst, owner): # <<<<<<<<<<<<<< - * try: - * try: + * if inst is None: + * return self */ /* Python wrapper */ @@ -2816,320 +2836,174 @@ static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_3__get__(PyObject *__pyx_v_s } static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v_inst, CYTHON_UNUSED PyObject *__pyx_v_owner) { + PyObject *__pyx_v_cache = 0; PyObject *__pyx_v_val = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; + int __pyx_t_1; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - PyObject *__pyx_t_13 = NULL; - int __pyx_t_14; + int __pyx_t_5; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__get__", 1); - /* "aiohttp/_helpers.pyx":22 + /* "aiohttp/_helpers.pyx":25 * * def __get__(self, inst, owner): - * try: # <<<<<<<<<<<<<< - * try: - * return inst._cache[self.name] + * if inst is None: # <<<<<<<<<<<<<< + * return self + * cdef dict cache = inst._cache */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - /*try:*/ { + 
__pyx_t_1 = (__pyx_v_inst == Py_None); + if (__pyx_t_1) { - /* "aiohttp/_helpers.pyx":23 + /* "aiohttp/_helpers.pyx":26 * def __get__(self, inst, owner): - * try: - * try: # <<<<<<<<<<<<<< - * return inst._cache[self.name] - * except KeyError: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_4, &__pyx_t_5, &__pyx_t_6); - __Pyx_XGOTREF(__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_5); - __Pyx_XGOTREF(__pyx_t_6); - /*try:*/ { - - /* "aiohttp/_helpers.pyx":24 - * try: - * try: - * return inst._cache[self.name] # <<<<<<<<<<<<<< - * except KeyError: - * val = self.wrapped(inst) + * if inst is None: + * return self # <<<<<<<<<<<<<< + * cdef dict cache = inst._cache + * val = cache.get(self.name, _sentinel) */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_inst, __pyx_n_s_cache); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 24, __pyx_L9_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_PyObject_GetItem(__pyx_t_7, __pyx_v_self->name); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 24, __pyx_L9_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __pyx_r = __pyx_t_8; - __pyx_t_8 = 0; - goto __pyx_L13_try_return; - - /* "aiohttp/_helpers.pyx":23 + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "aiohttp/_helpers.pyx":25 + * * def __get__(self, inst, owner): - * try: - * try: # <<<<<<<<<<<<<< - * return inst._cache[self.name] - * except KeyError: + * if inst is None: # <<<<<<<<<<<<<< + * return self + * cdef dict cache = inst._cache */ - } - __pyx_L9_error:; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - - /* "aiohttp/_helpers.pyx":25 - * try: - * return inst._cache[self.name] - * except KeyError: # <<<<<<<<<<<<<< - * val = self.wrapped(inst) - * inst._cache[self.name] = val - */ - __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyError); - if (__pyx_t_9) { - __Pyx_AddTraceback("aiohttp._helpers.reify.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_7, &__pyx_t_10) < 0) __PYX_ERR(0, 25, __pyx_L11_except_error) - __Pyx_XGOTREF(__pyx_t_8); - __Pyx_XGOTREF(__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_10); - - /* "aiohttp/_helpers.pyx":26 - * return inst._cache[self.name] - * except KeyError: - * val = self.wrapped(inst) # <<<<<<<<<<<<<< - * inst._cache[self.name] = val - * return val + } + + /* "aiohttp/_helpers.pyx":27 + * if inst is None: + * return self + * cdef dict cache = inst._cache # <<<<<<<<<<<<<< + * val = cache.get(self.name, _sentinel) + * if val is _sentinel: */ - __Pyx_INCREF(__pyx_v_self->wrapped); - __pyx_t_12 = __pyx_v_self->wrapped; __pyx_t_13 = NULL; - __pyx_t_9 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_12))) { - __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_12); - if (likely(__pyx_t_13)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); - __Pyx_INCREF(__pyx_t_13); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_12, function); - __pyx_t_9 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_13, __pyx_v_inst}; - __pyx_t_11 = __Pyx_PyObject_FastCall(__pyx_t_12, __pyx_callargs+1-__pyx_t_9, 1+__pyx_t_9); - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 26, __pyx_L11_except_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - } - __pyx_v_val = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_helpers.pyx":27 - * except 
KeyError: - * val = self.wrapped(inst) - * inst._cache[self.name] = val # <<<<<<<<<<<<<< - * return val - * except AttributeError: + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_inst, __pyx_n_s_cache); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (!(likely(PyDict_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("dict", __pyx_t_2))) __PYX_ERR(0, 27, __pyx_L1_error) + __pyx_v_cache = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_helpers.pyx":28 + * return self + * cdef dict cache = inst._cache + * val = cache.get(self.name, _sentinel) # <<<<<<<<<<<<<< + * if val is _sentinel: + * val = self.wrapped(inst) */ - __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_inst, __pyx_n_s_cache); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 27, __pyx_L11_except_error) - __Pyx_GOTREF(__pyx_t_11); - if (unlikely((PyObject_SetItem(__pyx_t_11, __pyx_v_self->name, __pyx_v_val) < 0))) __PYX_ERR(0, 27, __pyx_L11_except_error) - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - - /* "aiohttp/_helpers.pyx":28 - * val = self.wrapped(inst) - * inst._cache[self.name] = val - * return val # <<<<<<<<<<<<<< - * except AttributeError: - * if inst is None: + if (unlikely(__pyx_v_cache == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 28, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyDict_GetItemDefault(__pyx_v_cache, __pyx_v_self->name, __pyx_v_7aiohttp_8_helpers__sentinel); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v_val = __pyx_t_2; + __pyx_t_2 = 0; + + /* "aiohttp/_helpers.pyx":29 + * cdef dict cache = inst._cache + * val = cache.get(self.name, _sentinel) + * if val is _sentinel: # <<<<<<<<<<<<<< + * val = self.wrapped(inst) + * cache[self.name] = val */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_val); - __pyx_r = __pyx_v_val; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - goto __pyx_L12_except_return; - } - goto __pyx_L11_except_error; - - /* "aiohttp/_helpers.pyx":23 - * def __get__(self, inst, owner): - * try: - * try: # <<<<<<<<<<<<<< - * return inst._cache[self.name] - * except KeyError: + __pyx_t_1 = (__pyx_v_val == __pyx_v_7aiohttp_8_helpers__sentinel); + if (__pyx_t_1) { + + /* "aiohttp/_helpers.pyx":30 + * val = cache.get(self.name, _sentinel) + * if val is _sentinel: + * val = self.wrapped(inst) # <<<<<<<<<<<<<< + * cache[self.name] = val + * return val */ - __pyx_L11_except_error:; - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L3_error; - __pyx_L13_try_return:; - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L7_try_return; - __pyx_L12_except_return:; - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L7_try_return; + __Pyx_INCREF(__pyx_v_self->wrapped); + __pyx_t_3 = __pyx_v_self->wrapped; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, 
function); + __pyx_t_5 = 1; } - - /* "aiohttp/_helpers.pyx":22 + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_inst}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 30, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF_SET(__pyx_v_val, __pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_helpers.pyx":31 + * if val is _sentinel: + * val = self.wrapped(inst) + * cache[self.name] = val # <<<<<<<<<<<<<< + * return val * - * def __get__(self, inst, owner): - * try: # <<<<<<<<<<<<<< - * try: - * return inst._cache[self.name] */ + if (unlikely(__pyx_v_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 31, __pyx_L1_error) } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely((PyDict_SetItem(__pyx_v_cache, __pyx_v_self->name, __pyx_v_val) < 0))) __PYX_ERR(0, 31, __pyx_L1_error) /* "aiohttp/_helpers.pyx":29 - * inst._cache[self.name] = val - * return val - * except AttributeError: # <<<<<<<<<<<<<< - * if inst is None: - * return self - */ - __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_AttributeError); - if (__pyx_t_9) { - __Pyx_AddTraceback("aiohttp._helpers.reify.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 29, __pyx_L5_except_error) - __Pyx_XGOTREF(__pyx_t_10); - __Pyx_XGOTREF(__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_8); - - /* "aiohttp/_helpers.pyx":30 - * return val - * except AttributeError: - * if inst is None: # <<<<<<<<<<<<<< - * return self - * raise - */ - __pyx_t_14 = (__pyx_v_inst == Py_None); - if (__pyx_t_14) { - - /* "aiohttp/_helpers.pyx":31 - * except AttributeError: - * if inst is None: - * return self # <<<<<<<<<<<<<< - * raise - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - goto __pyx_L6_except_return; - - /* "aiohttp/_helpers.pyx":30 - * return val - * except AttributeError: - * if inst is None: # <<<<<<<<<<<<<< - * return self - * raise + * cdef dict cache = inst._cache + * val = cache.get(self.name, _sentinel) + * if val is _sentinel: # <<<<<<<<<<<<<< + * val = self.wrapped(inst) + * cache[self.name] = val */ - } + } - /* "aiohttp/_helpers.pyx":32 - * if inst is None: - * return self - * raise # <<<<<<<<<<<<<< + /* "aiohttp/_helpers.pyx":32 + * val = self.wrapped(inst) + * cache[self.name] = val + * return val # <<<<<<<<<<<<<< * * def __set__(self, inst, value): */ - __Pyx_GIVEREF(__pyx_t_10); - __Pyx_GIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_ErrRestoreWithState(__pyx_t_10, __pyx_t_7, __pyx_t_8); - __pyx_t_10 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; - __PYX_ERR(0, 32, __pyx_L5_except_error) - } - goto __pyx_L5_except_error; - - /* "aiohttp/_helpers.pyx":22 - * - * def __get__(self, inst, owner): - * try: # <<<<<<<<<<<<<< - * try: - * return inst._cache[self.name] - */ - __pyx_L5_except_error:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - 
__Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L1_error; - __pyx_L7_try_return:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L0; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L0; - } + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_val); + __pyx_r = __pyx_v_val; + goto __pyx_L0; - /* "aiohttp/_helpers.pyx":21 + /* "aiohttp/_helpers.pyx":24 * return self.wrapped.__doc__ * * def __get__(self, inst, owner): # <<<<<<<<<<<<<< - * try: - * try: + * if inst is None: + * return self */ /* function exit code */ __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_XDECREF(__pyx_t_12); - __Pyx_XDECREF(__pyx_t_13); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("aiohttp._helpers.reify.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; + __Pyx_XDECREF(__pyx_v_cache); __Pyx_XDECREF(__pyx_v_val); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); @@ -3137,7 +3011,7 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7a } /* "aiohttp/_helpers.pyx":34 - * raise + * return val * * def __set__(self, inst, value): # <<<<<<<<<<<<<< * raise AttributeError("reified property is read-only") @@ -3179,7 +3053,7 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify_4__set__(CYTHON_UNUSED struct __py __PYX_ERR(0, 35, __pyx_L1_error) /* "aiohttp/_helpers.pyx":34 - * raise + * return val * * def __set__(self, inst, value): # <<<<<<<<<<<<<< * raise AttributeError("reified property is read-only") @@ -4282,14 +4156,12 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, - {&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1}, {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, {&__pyx_n_s__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 0, 1, 1}, {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, {&__pyx_n_s_aiohttp__helpers, __pyx_k_aiohttp__helpers, sizeof(__pyx_k_aiohttp__helpers), 0, 0, 1, 1}, {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, {&__pyx_n_s_cache, __pyx_k_cache, sizeof(__pyx_k_cache), 0, 0, 1, 1}, - {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, @@ -4297,6 +4169,7 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { {&__pyx_n_s_doc, __pyx_k_doc, sizeof(__pyx_k_doc), 0, 0, 1, 1}, {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, {&__pyx_n_s_getstate, __pyx_k_getstate, 
sizeof(__pyx_k_getstate), 0, 0, 1, 1}, {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, @@ -4304,6 +4177,7 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_object, __pyx_k_object, sizeof(__pyx_k_object), 0, 0, 1, 1}, {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, @@ -4333,8 +4207,8 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { } /* #### Code section: cached_builtins ### */ static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_KeyError = __Pyx_GetBuiltinName(__pyx_n_s_KeyError); if (!__pyx_builtin_KeyError) __PYX_ERR(0, 25, __pyx_L1_error) - __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 29, __pyx_L1_error) + __pyx_builtin_object = __Pyx_GetBuiltinName(__pyx_n_s_object); if (!__pyx_builtin_object) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 35, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; @@ -4404,10 +4278,12 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { /* #### Code section: init_constants ### */ static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_88416349 = PyInt_FromLong(88416349L); if (unlikely(!__pyx_int_88416349)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_124832655 = PyInt_FromLong(124832655L); if (unlikely(!__pyx_int_124832655)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_250410337 = PyInt_FromLong(250410337L); if (unlikely(!__pyx_int_250410337)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_get.method_name = &__pyx_n_s_get; + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 2, __pyx_L1_error); + __pyx_int_88416349 = PyInt_FromLong(88416349L); if (unlikely(!__pyx_int_88416349)) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_int_124832655 = PyInt_FromLong(124832655L); if (unlikely(!__pyx_int_124832655)) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_int_250410337 = PyInt_FromLong(250410337L); if (unlikely(!__pyx_int_250410337)) __PYX_ERR(0, 2, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; @@ -4431,6 +4307,7 @@ static int __Pyx_modinit_global_init_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); /*--- Global init code ---*/ + __pyx_v_7aiohttp_8_helpers__sentinel = Py_None; Py_INCREF(Py_None); __Pyx_RefNannyFinishContext(); return 0; } @@ -4459,15 +4336,15 @@ static int __Pyx_modinit_type_init_code(void) { __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); /*--- Type init code ---*/ #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_7aiohttp_8_helpers_reify = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_8_helpers_reify_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_8_helpers_reify)) __PYX_ERR(0, 1, __pyx_L1_error) - 
if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_8_helpers_reify_spec, __pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_ptype_7aiohttp_8_helpers_reify = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_8_helpers_reify_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_8_helpers_reify)) __PYX_ERR(0, 4, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_8_helpers_reify_spec, __pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 4, __pyx_L1_error) #else __pyx_ptype_7aiohttp_8_helpers_reify = &__pyx_type_7aiohttp_8_helpers_reify; #endif #if !CYTHON_COMPILING_IN_LIMITED_API #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 4, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_8_helpers_reify->tp_print = 0; @@ -4477,9 +4354,9 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_8_helpers_reify->tp_getattro = __Pyx_PyObject_GenericGetAttr; } #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_reify, (PyObject *) __pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_reify, (PyObject *) __pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 4, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 4, __pyx_L1_error) #endif __Pyx_RefNannyFinishContext(); return 0; @@ -4695,26 +4572,26 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__helpers(PyObject *__pyx_pyinit_mo #else #if PY_MAJOR_VERSION < 3 __pyx_m = Py_InitModule4("_helpers", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely(!__pyx_m)) __PYX_ERR(0, 2, __pyx_L1_error) #elif CYTHON_USE_MODULE_STATE - __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) { int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to _helpers pseudovariable */ - if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_helpers" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(0, 2, __pyx_L1_error) pystate_addmodule_run = 1; } #else __pyx_m = PyModule_Create(&__pyx_moduledef); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely(!__pyx_m)) __PYX_ERR(0, 2, __pyx_L1_error) #endif #endif CYTHON_UNUSED_VAR(__pyx_t_1); - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 2, __pyx_L1_error) Py_INCREF(__pyx_d); - __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, 
__pyx_L1_error) + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 2, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { @@ -4725,30 +4602,30 @@ if (!__Pyx_RefNanny) { } #endif __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__helpers(void)", 0); - if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #ifdef __Pxy_PyFrame_Initialize_Offsets __Pxy_PyFrame_Initialize_Offsets(); #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 2, __pyx_L1_error) #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ @@ -4756,40 +4633,53 @@ if (!__Pyx_RefNanny) { PyEval_InitThreads(); #endif /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 2, __pyx_L1_error) stringtab_initialized = 1; - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 2, __pyx_L1_error) #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif if (__pyx_module_is_main_aiohttp___helpers) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 2, __pyx_L1_error) } #if PY_MAJOR_VERSION >= 3 { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 2, __pyx_L1_error) if (!PyDict_GetItemString(modules, "aiohttp._helpers")) { - if (unlikely((PyDict_SetItemString(modules, "aiohttp._helpers", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely((PyDict_SetItemString(modules, "aiohttp._helpers", __pyx_m) < 0))) __PYX_ERR(0, 2, __pyx_L1_error) } } #endif /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 2, __pyx_L1_error) /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 2, __pyx_L1_error) /*--- Global type/function init code ---*/ (void)__Pyx_modinit_global_init_code(); (void)__Pyx_modinit_variable_export_code(); (void)__Pyx_modinit_function_export_code(); - if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 2, __pyx_L1_error) (void)__Pyx_modinit_type_import_code(); (void)__Pyx_modinit_variable_import_code(); (void)__Pyx_modinit_function_import_code(); /*--- Execution code ---*/ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 2, __pyx_L1_error) #endif + /* "aiohttp/_helpers.pyx":2 + * + * cdef _sentinel = object() # <<<<<<<<<<<<<< + * + * cdef class reify: + */ + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_object); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_v_7aiohttp_8_helpers__sentinel); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_8_helpers__sentinel, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * cdef tuple state @@ -4823,14 +4713,15 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_reify, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_helpers.pyx":1 - * cdef class reify: # <<<<<<<<<<<<<< - * """Use as a class method decorator. 
It operates almost exactly like - * the Python `@property` decorator, but it puts the result of the + /* "aiohttp/_helpers.pyx":2 + * + * cdef _sentinel = object() # <<<<<<<<<<<<<< + * + * cdef class reify: */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 2, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /*--- Wrapped vars code ---*/ @@ -5259,14 +5150,14 @@ static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyO { int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; // error + if (unlikely(eq < 0)) return NULL; return kwvalues[i]; } } - return NULL; // not found (no exception set) + return NULL; } #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); PyObject *dict; dict = PyDict_New(); @@ -5350,7 +5241,7 @@ static int __Pyx_ParseOptionalKeywords( if (*name) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); // transfer ownership of value to values + Py_INCREF(value); Py_DECREF(key); #endif key = NULL; @@ -5369,7 +5260,7 @@ static int __Pyx_ParseOptionalKeywords( && _PyString_Eq(**name, key)) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -5401,7 +5292,7 @@ static int __Pyx_ParseOptionalKeywords( if (cmp == 0) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -5481,99 +5372,240 @@ static void __Pyx_RaiseArgtupleInvalid( (num_expected == 1) ? 
"" : "s", num_found); } -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (unlikely(!j)) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; } -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); + return result; +} #endif + +/* UnpackUnboundCMethod */ +static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { + PyObject *result; + PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); + if (unlikely(!selfless_args)) return NULL; + result = PyObject_Call(method, selfless_args, kwargs); + Py_DECREF(selfless_args); + return result; } -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); +static PyMethodDef __Pyx_UnboundCMethod_Def = { + "CythonUnboundCMethod", + __PYX_REINTERPRET_FUNCION(PyCFunction, __Pyx_SelflessCall), + METH_VARARGS | METH_KEYWORDS, + NULL +}; +static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) + #else + if (likely(!__Pyx_CyOrPyCFunction_Check(method))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); + } else +#endif +#if CYTHON_COMPILING_IN_PYPY +#else + if (PyCFunction_Check(method)) +#endif + { + PyObject *self; + int self_found; +#if 
CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + self = PyObject_GetAttrString(method, "__self__"); + if (!self) { + PyErr_Clear(); + } +#else + self = PyCFunction_GET_SELF(method); +#endif + self_found = (self && self != Py_None); +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + Py_XDECREF(self); +#endif + if (self_found) { + PyObject *unbound_method = PyCFunction_New(&__Pyx_UnboundCMethod_Def, method); + if (unlikely(!unbound_method)) return -1; + Py_DECREF(method); + target->method = unbound_method; + } } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; + return 0; +} + +/* CallUnboundCMethod1 */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + #endif + } else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); #else - return PySequence_GetItem(o, i); + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); #endif +bad: + Py_XDECREF(args); + return result; } -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; + +/* CallUnboundCMethod2 */ +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) { + if (likely(cfunc->func)) { + PyObject *args[2] = {arg1, arg2}; + if (cfunc->flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif } + #if PY_VERSION_HEX >= 0x030700A0 + if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS)) + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } + return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); } else { - PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; - PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; - if (mm && mm->mp_subscript) { - PyObject *r, *key = PyInt_FromSsize_t(i); - if (unlikely(!key)) return NULL; - r = mm->mp_subscript(o, key); - Py_DECREF(key); - return r; - } - if (likely(sm && sm->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { - Py_ssize_t l = sm->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return sm->sq_item(o, i); - } + args = PyTuple_New(3); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 1, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 2, arg2); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); } #else - if (is_list || PySequence_Check(o)) { - return PySequence_GetItem(o, i); + args = PyTuple_Pack(3, self, arg1, arg2); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* dict_getitem_default */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { + PyObject* value; +#if PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + value = default_value; + } + Py_INCREF(value); + if 
((1)); +#else + if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + value = default_value; + } + Py_INCREF(value); } #endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); + else { + if (default_value == Py_None) + value = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_get, d, key); + else + value = __Pyx_CallUnboundCMethod2(&__pyx_umethod_PyDict_Type_get, d, key, default_value); + } + return value; } /* PyFunctionFastCall */ @@ -5699,31 +5731,6 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, } #endif -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = Py_TYPE(func)->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - /* PyObjectCallMethO */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { @@ -5827,239 +5834,6 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObj #endif } -/* PyObjectCallOneArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *args[2] = {NULL, arg}; - return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* ObjectGetItem */ -#if CYTHON_USE_TYPE_SLOTS -static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject *index) { - PyObject *runerr = NULL; - Py_ssize_t key_value; - key_value = __Pyx_PyIndex_AsSsize_t(index); - if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { - return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); - } - if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { - __Pyx_TypeName index_type_name = __Pyx_PyType_GetName(Py_TYPE(index)); - PyErr_Clear(); - PyErr_Format(PyExc_IndexError, - "cannot fit '" __Pyx_FMT_TYPENAME "' into an index-sized integer", index_type_name); - __Pyx_DECREF_TypeName(index_type_name); - } - return NULL; -} -static PyObject *__Pyx_PyObject_GetItem_Slow(PyObject *obj, PyObject *key) { - __Pyx_TypeName obj_type_name; - if (likely(PyType_Check(obj))) { - PyObject *meth = __Pyx_PyObject_GetAttrStrNoError(obj, __pyx_n_s_class_getitem); - if (!meth) { - PyErr_Clear(); - } else { - PyObject *result = __Pyx_PyObject_CallOneArg(meth, key); - Py_DECREF(meth); - return result; - } - } - obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, - "'" __Pyx_FMT_TYPENAME "' object is not subscriptable", obj_type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return NULL; -} -static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *key) { - PyTypeObject *tp = Py_TYPE(obj); - PyMappingMethods *mm = tp->tp_as_mapping; - PySequenceMethods *sm = tp->tp_as_sequence; - if (likely(mm && mm->mp_subscript)) { - return mm->mp_subscript(obj, key); - } - if (likely(sm && sm->sq_item)) { - return __Pyx_PyObject_GetIndex(obj, key); - } - 
return __Pyx_PyObject_GetItem_Slow(obj, key); -} -#endif - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_value == NULL || exc_info->exc_value == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - PyObject *exc_value = exc_info->exc_value; - if (exc_value == NULL || exc_value == Py_None) { - *value = NULL; - *type = NULL; - *tb = NULL; - } else { - *value = exc_value; - Py_INCREF(*value); - *type = (PyObject*) Py_TYPE(exc_value); - Py_INCREF(*type); - *tb = PyException_GetTraceback(exc_value); - } - #elif CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); - #endif -} -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 - _PyErr_StackItem *exc_info = tstate->exc_info; - PyObject *tmp_value = exc_info->exc_value; - exc_info->exc_value = value; - Py_XDECREF(tmp_value); - Py_XDECREF(type); - Py_XDECREF(tb); - #else - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); - #endif -} -#endif - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type = NULL, *local_value, *local_tb = NULL; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030C00A6 - local_value = tstate->current_exception; - tstate->current_exception = 0; - if (likely(local_value)) { - local_type = (PyObject*) Py_TYPE(local_value); - Py_INCREF(local_type); - local_tb = PyException_GetTraceback(local_value); - } - #else - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; - #endif -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if 
CYTHON_FAST_THREAD_STATE && PY_VERSION_HEX >= 0x030C00A6 - if (unlikely(tstate->current_exception)) -#elif CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - *type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - #if PY_VERSION_HEX >= 0x030B00a4 - tmp_value = exc_info->exc_value; - exc_info->exc_value = local_value; - tmp_type = NULL; - tmp_tb = NULL; - Py_XDECREF(local_type); - Py_XDECREF(local_tb); - #else - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - #endif - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - /* RaiseException */ #if PY_MAJOR_VERSION < 3 static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { @@ -6396,17 +6170,6 @@ static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) return __Pyx_GetBuiltinName(name); } -/* RaiseUnexpectedTypeError */ -static int -__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) -{ - __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, - expected, obj_type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - /* Import */ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { PyObject *module = 0; @@ -6508,6 +6271,101 @@ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { return value; } +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (unlikely(!j)) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if 
((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + /* GetAttr */ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { #if CYTHON_USE_TYPE_SLOTS @@ -6620,6 +6478,12 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); } +/* PyObjectCallOneArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *args[2] = {NULL, arg}; + return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + /* PyObjectGetMethod */ static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { PyObject *attr; @@ -6782,38 +6646,38 @@ static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffs #endif return -1; } -#if !CYTHON_USE_TYPE_SLOTS - if (dictoffset == 0) { - PyErr_Format(PyExc_TypeError, - "extension type '%s.200s': " - "unable to validate whether bases have a __dict__ " - "when CYTHON_USE_TYPE_SLOTS is off " - "(likely because you are building in the limited API). 
" - "Therefore, all extension types with multiple bases " - "must add 'cdef dict __dict__' in this compilation mode", - type_name); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } -#else - if (dictoffset == 0 && b->tp_dictoffset) + if (dictoffset == 0) { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "extension type '%.200s' has no __dict__ slot, " - "but base type '" __Pyx_FMT_TYPENAME "' has: " - "either add 'cdef dict __dict__' to the extension type " - "or add '__slots__ = [...]' to the base type", - type_name, b_name); - __Pyx_DECREF_TypeName(b_name); + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif #if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); + Py_DECREF(b0); #endif - return -1; + return -1; + } } -#endif #if CYTHON_AVOID_BORROWED_REFS Py_DECREF(b0); #endif @@ -8053,7 +7917,7 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, default: return NULL; } - return ((_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); } static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { @@ -8512,7 +8376,7 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( #else py_code = PyCode_NewEmpty(filename, funcname, py_line); #endif - Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + Py_XDECREF(py_funcname); return py_code; bad: Py_XDECREF(py_funcname); diff --git a/aiohttp/_helpers.pyx b/aiohttp/_helpers.pyx index 665f367..5f08922 100644 --- a/aiohttp/_helpers.pyx +++ b/aiohttp/_helpers.pyx @@ -1,3 +1,6 @@ + +cdef _sentinel = object() + cdef class reify: """Use as a class method decorator. 
It operates almost exactly like the Python `@property` decorator, but it puts the result of the @@ -19,17 +22,14 @@ cdef class reify: return self.wrapped.__doc__ def __get__(self, inst, owner): - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise + if inst is None: + return self + cdef dict cache = inst._cache + val = cache.get(self.name, _sentinel) + if val is _sentinel: + val = self.wrapped(inst) + cache[self.name] = val + return val def __set__(self, inst, value): raise AttributeError("reified property is read-only") diff --git a/aiohttp/_http_parser.c b/aiohttp/_http_parser.c index c36983c..1e676cd 100644 --- a/aiohttp/_http_parser.c +++ b/aiohttp/_http_parser.c @@ -1,4 +1,4 @@ -/* Generated by Cython 3.0.5 */ +/* Generated by Cython 3.0.10 */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN @@ -41,10 +41,10 @@ #else #define __PYX_EXTRA_ABI_MODULE_NAME "" #endif -#define CYTHON_ABI "3_0_5" __PYX_EXTRA_ABI_MODULE_NAME +#define CYTHON_ABI "3_0_10" __PYX_EXTRA_ABI_MODULE_NAME #define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI #define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." -#define CYTHON_HEX_VERSION 0x030005F0 +#define CYTHON_HEX_VERSION 0x03000AF0 #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof @@ -136,6 +136,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(PYPY_VERSION) #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_CPYTHON 0 @@ -197,6 +199,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(CYTHON_LIMITED_API) #ifdef Py_LIMITED_API #undef __PYX_LIMITED_VERSION_HEX @@ -258,7 +262,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif -#elif defined(PY_NOGIL) + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 @@ -267,11 +273,17 @@ #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #ifndef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 1 #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS @@ -279,8 +291,6 @@ #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif @@ -292,11 +302,22 @@ #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif 
#ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif @@ -304,6 +325,12 @@ #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 1 @@ -394,6 +421,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) @@ -586,18 +616,19 @@ PyObject *exception_table = NULL; PyObject *types_module=NULL, *code_type=NULL, *result=NULL; #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; // borrowed - #endif + PyObject *version_info; PyObject *py_minor_version = NULL; + #endif long minor_version = 0; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; // we don't yet need to distinguish between versions > 11 + minor_version = 11; #else if (!(version_info = PySys_GetObject("version_info"))) goto end; if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); if (minor_version == -1 && PyErr_Occurred()) goto end; #endif if (!(types_module = PyImport_ImportModule("types"))) goto end; @@ -618,7 +649,6 @@ Py_XDECREF(code_type); Py_XDECREF(exception_table); Py_XDECREF(types_module); - Py_XDECREF(py_minor_version); if (type) { PyErr_Restore(type, value, traceback); } @@ -651,7 +681,7 @@ PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); // we don't have access to __pyx_empty_bytes here + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); if (!empty_bytes) return NULL; result = #if PY_VERSION_HEX >= 0x030C0000 @@ -737,8 +767,13 @@ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames); #else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif #endif #if CYTHON_METH_FASTCALL #define __Pyx_METH_FASTCALL METH_FASTCALL @@ -946,7 +981,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #endif #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 #define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE(obj);\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ PyObject_GC_Del(obj);\ Py_DECREF(type);\ @@ -1090,7 +1125,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) #endif -#if PY_VERSION_HEX >= 0x030d00A1 +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 #define 
__Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) #else static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { @@ -1177,7 +1212,7 @@ static CYTHON_INLINE float __PYX_NAN() { #endif #define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } @@ -1203,6 +1238,7 @@ static CYTHON_INLINE float __PYX_NAN() { /* Early includes */ #include #include +#include #include "pythread.h" #include #include @@ -1287,24 +1323,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const wchar_t *u) -{ - const wchar_t *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#else -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) -{ - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#endif #define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) @@ -1354,7 +1373,7 @@ static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #endif typedef Py_ssize_t __Pyx_compact_pylong; typedef size_t __Pyx_compact_upylong; - #else // Py < 3.12 + #else #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) @@ -1527,7 +1546,7 @@ struct __pyx_opt_args_7cpython_11contextvars_get_value_no_default { }; struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init; -/* "aiohttp/_http_parser.pyx":327 +/* "aiohttp/_http_parser.pyx":328 * PyMem_Free(self._csettings) * * cdef _init( # <<<<<<<<<<<<<< @@ -1546,7 +1565,7 @@ struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init { int auto_decompress; }; -/* "aiohttp/_http_parser.pyx":110 +/* "aiohttp/_http_parser.pyx":111 * * @cython.freelist(DEFAULT_FREELIST_SIZE) * cdef class RawRequestMessage: # <<<<<<<<<<<<<< @@ -1568,7 +1587,7 @@ struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage { }; -/* "aiohttp/_http_parser.pyx":210 +/* "aiohttp/_http_parser.pyx":211 * * @cython.freelist(DEFAULT_FREELIST_SIZE) * cdef class RawResponseMessage: # <<<<<<<<<<<<<< @@ -1589,7 +1608,7 @@ struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage { }; -/* "aiohttp/_http_parser.pyx":272 +/* "aiohttp/_http_parser.pyx":273 * * @cython.internal * cdef class HttpParser: # <<<<<<<<<<<<<< @@ -1632,7 +1651,7 @@ struct __pyx_obj_7aiohttp_12_http_parser_HttpParser { }; -/* "aiohttp/_http_parser.pyx":574 +/* "aiohttp/_http_parser.pyx":580 * * * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< @@ -1644,7 +1663,7 @@ struct 
__pyx_obj_7aiohttp_12_http_parser_HttpRequestParser { }; -/* "aiohttp/_http_parser.pyx":638 +/* "aiohttp/_http_parser.pyx":644 * * * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< @@ -1656,7 +1675,7 @@ struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser { }; -/* "aiohttp/_http_parser.pyx":147 +/* "aiohttp/_http_parser.pyx":148 * info.append(("chunked", self.chunked)) * info.append(("url", self.url)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< @@ -1671,7 +1690,7 @@ struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr { }; -/* "aiohttp/_http_parser.pyx":244 +/* "aiohttp/_http_parser.pyx":245 * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< @@ -1687,7 +1706,7 @@ struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr { -/* "aiohttp/_http_parser.pyx":272 +/* "aiohttp/_http_parser.pyx":273 * * @cython.internal * cdef class HttpParser: # <<<<<<<<<<<<<< @@ -1711,7 +1730,7 @@ static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtabptr static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); -/* "aiohttp/_http_parser.pyx":574 +/* "aiohttp/_http_parser.pyx":580 * * * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< @@ -1725,7 +1744,7 @@ struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser { static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser; -/* "aiohttp/_http_parser.pyx":638 +/* "aiohttp/_http_parser.pyx":644 * * * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< @@ -1972,8 +1991,8 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) #else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg // no-op - #define __Pyx_Arg_XDECREF_VARARGS(arg) // no-op - arg is borrowed + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define __Pyx_Arg_XDECREF_VARARGS(arg) #endif #define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) #define __Pyx_KwValues_VARARGS(args, nargs) NULL @@ -1985,12 +2004,13 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); #else #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg // no-op, __Pyx_Arg_FASTCALL is direct and this needs - #define __Pyx_Arg_XDECREF_FASTCALL(arg) // no-op - arg was returned from array + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) #else #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS @@ -2416,24 +2436,27 @@ static int __Pyx_MergeVtables(PyTypeObject *type); #endif 
/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_5 -#define __PYX_HAVE_RT_ImportType_proto_3_0_5 +#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_10 +#define __PYX_HAVE_RT_ImportType_proto_3_0_10 #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L #include #endif #if (defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || __cplusplus >= 201103L -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_5(s) alignof(s) +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_10(s) alignof(s) #else -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_5(s) sizeof(void*) +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_10(s) sizeof(void*) #endif -enum __Pyx_ImportType_CheckSize_3_0_5 { - __Pyx_ImportType_CheckSize_Error_3_0_5 = 0, - __Pyx_ImportType_CheckSize_Warn_3_0_5 = 1, - __Pyx_ImportType_CheckSize_Ignore_3_0_5 = 2 +enum __Pyx_ImportType_CheckSize_3_0_10 { + __Pyx_ImportType_CheckSize_Error_3_0_10 = 0, + __Pyx_ImportType_CheckSize_Warn_3_0_10 = 1, + __Pyx_ImportType_CheckSize_Ignore_3_0_10 = 2 }; -static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_5 check_size); +static PyTypeObject *__Pyx_ImportType_3_0_10(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_10 check_size); #endif +/* pyfrozenset_new.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it); + /* decode_c_string.proto */ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( const char* cstring, Py_ssize_t start, Py_ssize_t stop, @@ -2530,7 +2553,7 @@ typedef struct { #endif void *defaults; int defaults_pyobjects; - size_t defaults_size; // used by FusedFunction for copying defaults + size_t defaults_size; int flags; PyObject *defaults_tuple; PyObject *defaults_kwdict; @@ -2817,6 +2840,8 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_18HttpResponseParser__on_status /* Module declarations from "cpython.string" */ +/* Module declarations from "libc.stddef" */ + /* Module declarations from "cpython.unicode" */ /* Module declarations from "cpython.pyport" */ @@ -2917,11 +2942,10 @@ static const char __pyx_k_r[] = "{}:\n\n {!r}\n {}"; static const char __pyx_k_TE[] = "TE"; static const char __pyx_k__2[] = ", "; static const char __pyx_k__3[] = ")>"; -static const char __pyx_k__4[] = "\r\n"; -static const char __pyx_k__6[] = " "; -static const char __pyx_k__7[] = "^"; -static const char __pyx_k__8[] = ""; -static const char __pyx_k__9[] = "?"; +static const char __pyx_k__4[] = ""; +static const char __pyx_k__6[] = "\r\n"; +static const char __pyx_k__8[] = " "; +static const char __pyx_k__9[] = "^"; static const char __pyx_k_br[] = "br"; static const char __pyx_k_ex[] = "ex"; static const char __pyx_k_gc[] = "gc"; @@ -2930,11 +2954,13 @@ static const char __pyx_k_AGE[] = "AGE"; static const char __pyx_k_URI[] = "URI"; static const char __pyx_k_URL[] = "URL"; static const char __pyx_k_VIA[] = "VIA"; -static const char __pyx_k__10[] = "#"; -static const char __pyx_k__12[] = "."; +static const char __pyx_k__10[] = "?"; +static const char __pyx_k__11[] = "#"; +static const char __pyx_k__13[] = "."; static const char __pyx_k_add[] = "add"; static const char __pyx_k_all[] = "__all__"; static const char __pyx_k_dct[] = "dct"; +static const char __pyx_k_get[] = "get"; static const char __pyx_k_new[] = "__new__"; static const char __pyx_k_ret[] = "ret"; static const char __pyx_k_url[] = "url"; @@ -3032,6 +3058,7 @@ static const char 
__pyx_k_isenabled[] = "isenabled"; static const char __pyx_k_multidict[] = "multidict"; static const char __pyx_k_pyx_state[] = "__pyx_state"; static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_websocket[] = "websocket"; static const char __pyx_k_CONNECTION[] = "CONNECTION"; static const char __pyx_k_KEEP_ALIVE[] = "KEEP_ALIVE"; static const char __pyx_k_SET_COOKIE[] = "SET_COOKIE"; @@ -3100,6 +3127,7 @@ static const char __pyx_k_auto_decompress[] = "auto_decompress"; static const char __pyx_k_http_exceptions[] = "http_exceptions"; static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_ALLOWED_UPGRADES[] = "ALLOWED_UPGRADES"; static const char __pyx_k_CIMultiDictProxy[] = "CIMultiDictProxy"; static const char __pyx_k_CONTENT_ENCODING[] = "CONTENT_ENCODING"; static const char __pyx_k_CONTENT_LANGUAGE[] = "CONTENT_LANGUAGE"; @@ -3352,6 +3380,8 @@ typedef struct { #if CYTHON_USE_MODULE_STATE #endif #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE PyObject *__pyx_type_7aiohttp_12_http_parser_RawRequestMessage; PyObject *__pyx_type_7aiohttp_12_http_parser_RawResponseMessage; PyObject *__pyx_type_7aiohttp_12_http_parser_HttpParser; @@ -3383,6 +3413,7 @@ typedef struct { PyObject *__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD; PyObject *__pyx_n_s_AGE; PyObject *__pyx_n_s_ALLOW; + PyObject *__pyx_n_s_ALLOWED_UPGRADES; PyObject *__pyx_n_s_AUTHORIZATION; PyObject *__pyx_n_s_BadHttpMessage; PyObject *__pyx_n_s_BadStatusLine; @@ -3507,17 +3538,17 @@ typedef struct { PyObject *__pyx_n_s_X_FORWARDED_FOR; PyObject *__pyx_n_s_X_FORWARDED_HOST; PyObject *__pyx_n_s_X_FORWARDED_PROTO; + PyObject *__pyx_n_s__10; PyObject *__pyx_kp_u__10; - PyObject *__pyx_kp_u__12; + PyObject *__pyx_kp_u__11; + PyObject *__pyx_kp_u__13; PyObject *__pyx_kp_u__2; PyObject *__pyx_kp_u__3; + PyObject *__pyx_n_s__4; PyObject *__pyx_kp_b__4; - PyObject *__pyx_kp_u__6; - PyObject *__pyx_kp_u__7; - PyObject *__pyx_n_s__8; - PyObject *__pyx_kp_b__8; + PyObject *__pyx_kp_u__4; + PyObject *__pyx_kp_b__6; PyObject *__pyx_kp_u__8; - PyObject *__pyx_n_s__9; PyObject *__pyx_kp_u__9; PyObject *__pyx_n_s_add; PyObject *__pyx_n_s_after; @@ -3563,6 +3594,7 @@ typedef struct { PyObject *__pyx_n_s_fragment; PyObject *__pyx_kp_u_gc; PyObject *__pyx_n_s_genexpr; + PyObject *__pyx_n_s_get; PyObject *__pyx_n_s_getstate; PyObject *__pyx_n_u_gzip; PyObject *__pyx_n_s_hdrs; @@ -3645,6 +3677,7 @@ typedef struct { PyObject *__pyx_n_s_val; PyObject *__pyx_n_s_version; PyObject *__pyx_n_u_version; + PyObject *__pyx_n_u_websocket; PyObject *__pyx_n_s_yarl; PyObject *__pyx_int_1; PyObject *__pyx_int_4084195; @@ -3654,33 +3687,34 @@ typedef struct { PyObject *__pyx_int_213037754; PyObject *__pyx_int_257960607; PyObject *__pyx_tuple__5; - PyObject *__pyx_tuple__11; - PyObject *__pyx_tuple__13; + PyObject *__pyx_tuple__7; + PyObject *__pyx_tuple__12; PyObject *__pyx_tuple__14; PyObject *__pyx_tuple__15; - PyObject *__pyx_tuple__17; - PyObject *__pyx_tuple__19; - PyObject *__pyx_tuple__23; - PyObject *__pyx_tuple__25; - PyObject *__pyx_tuple__27; - PyObject *__pyx_tuple__29; - PyObject *__pyx_tuple__36; - PyObject *__pyx_codeobj__16; - PyObject *__pyx_codeobj__18; - PyObject *__pyx_codeobj__20; + PyObject *__pyx_tuple__16; + PyObject *__pyx_tuple__18; + PyObject *__pyx_tuple__20; + PyObject *__pyx_tuple__24; + PyObject *__pyx_tuple__26; + PyObject *__pyx_tuple__28; + PyObject *__pyx_tuple__30; + PyObject 
*__pyx_tuple__37; + PyObject *__pyx_codeobj__17; + PyObject *__pyx_codeobj__19; PyObject *__pyx_codeobj__21; PyObject *__pyx_codeobj__22; - PyObject *__pyx_codeobj__24; - PyObject *__pyx_codeobj__26; - PyObject *__pyx_codeobj__28; - PyObject *__pyx_codeobj__30; + PyObject *__pyx_codeobj__23; + PyObject *__pyx_codeobj__25; + PyObject *__pyx_codeobj__27; + PyObject *__pyx_codeobj__29; PyObject *__pyx_codeobj__31; PyObject *__pyx_codeobj__32; PyObject *__pyx_codeobj__33; PyObject *__pyx_codeobj__34; PyObject *__pyx_codeobj__35; - PyObject *__pyx_codeobj__37; + PyObject *__pyx_codeobj__36; PyObject *__pyx_codeobj__38; + PyObject *__pyx_codeobj__39; } __pyx_mstate; #if CYTHON_USE_MODULE_STATE @@ -3756,6 +3790,7 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD); Py_CLEAR(clear_module_state->__pyx_n_s_AGE); Py_CLEAR(clear_module_state->__pyx_n_s_ALLOW); + Py_CLEAR(clear_module_state->__pyx_n_s_ALLOWED_UPGRADES); Py_CLEAR(clear_module_state->__pyx_n_s_AUTHORIZATION); Py_CLEAR(clear_module_state->__pyx_n_s_BadHttpMessage); Py_CLEAR(clear_module_state->__pyx_n_s_BadStatusLine); @@ -3880,17 +3915,17 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_n_s_X_FORWARDED_FOR); Py_CLEAR(clear_module_state->__pyx_n_s_X_FORWARDED_HOST); Py_CLEAR(clear_module_state->__pyx_n_s_X_FORWARDED_PROTO); + Py_CLEAR(clear_module_state->__pyx_n_s__10); Py_CLEAR(clear_module_state->__pyx_kp_u__10); - Py_CLEAR(clear_module_state->__pyx_kp_u__12); + Py_CLEAR(clear_module_state->__pyx_kp_u__11); + Py_CLEAR(clear_module_state->__pyx_kp_u__13); Py_CLEAR(clear_module_state->__pyx_kp_u__2); Py_CLEAR(clear_module_state->__pyx_kp_u__3); + Py_CLEAR(clear_module_state->__pyx_n_s__4); Py_CLEAR(clear_module_state->__pyx_kp_b__4); - Py_CLEAR(clear_module_state->__pyx_kp_u__6); - Py_CLEAR(clear_module_state->__pyx_kp_u__7); - Py_CLEAR(clear_module_state->__pyx_n_s__8); - Py_CLEAR(clear_module_state->__pyx_kp_b__8); + Py_CLEAR(clear_module_state->__pyx_kp_u__4); + Py_CLEAR(clear_module_state->__pyx_kp_b__6); Py_CLEAR(clear_module_state->__pyx_kp_u__8); - Py_CLEAR(clear_module_state->__pyx_n_s__9); Py_CLEAR(clear_module_state->__pyx_kp_u__9); Py_CLEAR(clear_module_state->__pyx_n_s_add); Py_CLEAR(clear_module_state->__pyx_n_s_after); @@ -3936,6 +3971,7 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_n_s_fragment); Py_CLEAR(clear_module_state->__pyx_kp_u_gc); Py_CLEAR(clear_module_state->__pyx_n_s_genexpr); + Py_CLEAR(clear_module_state->__pyx_n_s_get); Py_CLEAR(clear_module_state->__pyx_n_s_getstate); Py_CLEAR(clear_module_state->__pyx_n_u_gzip); Py_CLEAR(clear_module_state->__pyx_n_s_hdrs); @@ -4018,6 +4054,7 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_n_s_val); Py_CLEAR(clear_module_state->__pyx_n_s_version); Py_CLEAR(clear_module_state->__pyx_n_u_version); + Py_CLEAR(clear_module_state->__pyx_n_u_websocket); Py_CLEAR(clear_module_state->__pyx_n_s_yarl); Py_CLEAR(clear_module_state->__pyx_int_1); Py_CLEAR(clear_module_state->__pyx_int_4084195); @@ -4027,33 +4064,34 @@ static int __pyx_m_clear(PyObject *m) { Py_CLEAR(clear_module_state->__pyx_int_213037754); Py_CLEAR(clear_module_state->__pyx_int_257960607); Py_CLEAR(clear_module_state->__pyx_tuple__5); - Py_CLEAR(clear_module_state->__pyx_tuple__11); - Py_CLEAR(clear_module_state->__pyx_tuple__13); + Py_CLEAR(clear_module_state->__pyx_tuple__7); + Py_CLEAR(clear_module_state->__pyx_tuple__12); 
Py_CLEAR(clear_module_state->__pyx_tuple__14); Py_CLEAR(clear_module_state->__pyx_tuple__15); - Py_CLEAR(clear_module_state->__pyx_tuple__17); - Py_CLEAR(clear_module_state->__pyx_tuple__19); - Py_CLEAR(clear_module_state->__pyx_tuple__23); - Py_CLEAR(clear_module_state->__pyx_tuple__25); - Py_CLEAR(clear_module_state->__pyx_tuple__27); - Py_CLEAR(clear_module_state->__pyx_tuple__29); - Py_CLEAR(clear_module_state->__pyx_tuple__36); - Py_CLEAR(clear_module_state->__pyx_codeobj__16); - Py_CLEAR(clear_module_state->__pyx_codeobj__18); - Py_CLEAR(clear_module_state->__pyx_codeobj__20); + Py_CLEAR(clear_module_state->__pyx_tuple__16); + Py_CLEAR(clear_module_state->__pyx_tuple__18); + Py_CLEAR(clear_module_state->__pyx_tuple__20); + Py_CLEAR(clear_module_state->__pyx_tuple__24); + Py_CLEAR(clear_module_state->__pyx_tuple__26); + Py_CLEAR(clear_module_state->__pyx_tuple__28); + Py_CLEAR(clear_module_state->__pyx_tuple__30); + Py_CLEAR(clear_module_state->__pyx_tuple__37); + Py_CLEAR(clear_module_state->__pyx_codeobj__17); + Py_CLEAR(clear_module_state->__pyx_codeobj__19); Py_CLEAR(clear_module_state->__pyx_codeobj__21); Py_CLEAR(clear_module_state->__pyx_codeobj__22); - Py_CLEAR(clear_module_state->__pyx_codeobj__24); - Py_CLEAR(clear_module_state->__pyx_codeobj__26); - Py_CLEAR(clear_module_state->__pyx_codeobj__28); - Py_CLEAR(clear_module_state->__pyx_codeobj__30); + Py_CLEAR(clear_module_state->__pyx_codeobj__23); + Py_CLEAR(clear_module_state->__pyx_codeobj__25); + Py_CLEAR(clear_module_state->__pyx_codeobj__27); + Py_CLEAR(clear_module_state->__pyx_codeobj__29); Py_CLEAR(clear_module_state->__pyx_codeobj__31); Py_CLEAR(clear_module_state->__pyx_codeobj__32); Py_CLEAR(clear_module_state->__pyx_codeobj__33); Py_CLEAR(clear_module_state->__pyx_codeobj__34); Py_CLEAR(clear_module_state->__pyx_codeobj__35); - Py_CLEAR(clear_module_state->__pyx_codeobj__37); + Py_CLEAR(clear_module_state->__pyx_codeobj__36); Py_CLEAR(clear_module_state->__pyx_codeobj__38); + Py_CLEAR(clear_module_state->__pyx_codeobj__39); return 0; } #endif @@ -4107,6 +4145,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD); Py_VISIT(traverse_module_state->__pyx_n_s_AGE); Py_VISIT(traverse_module_state->__pyx_n_s_ALLOW); + Py_VISIT(traverse_module_state->__pyx_n_s_ALLOWED_UPGRADES); Py_VISIT(traverse_module_state->__pyx_n_s_AUTHORIZATION); Py_VISIT(traverse_module_state->__pyx_n_s_BadHttpMessage); Py_VISIT(traverse_module_state->__pyx_n_s_BadStatusLine); @@ -4231,17 +4270,17 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_n_s_X_FORWARDED_FOR); Py_VISIT(traverse_module_state->__pyx_n_s_X_FORWARDED_HOST); Py_VISIT(traverse_module_state->__pyx_n_s_X_FORWARDED_PROTO); + Py_VISIT(traverse_module_state->__pyx_n_s__10); Py_VISIT(traverse_module_state->__pyx_kp_u__10); - Py_VISIT(traverse_module_state->__pyx_kp_u__12); + Py_VISIT(traverse_module_state->__pyx_kp_u__11); + Py_VISIT(traverse_module_state->__pyx_kp_u__13); Py_VISIT(traverse_module_state->__pyx_kp_u__2); Py_VISIT(traverse_module_state->__pyx_kp_u__3); + Py_VISIT(traverse_module_state->__pyx_n_s__4); Py_VISIT(traverse_module_state->__pyx_kp_b__4); - Py_VISIT(traverse_module_state->__pyx_kp_u__6); - Py_VISIT(traverse_module_state->__pyx_kp_u__7); - Py_VISIT(traverse_module_state->__pyx_n_s__8); - Py_VISIT(traverse_module_state->__pyx_kp_b__8); + Py_VISIT(traverse_module_state->__pyx_kp_u__4); + 
Py_VISIT(traverse_module_state->__pyx_kp_b__6); Py_VISIT(traverse_module_state->__pyx_kp_u__8); - Py_VISIT(traverse_module_state->__pyx_n_s__9); Py_VISIT(traverse_module_state->__pyx_kp_u__9); Py_VISIT(traverse_module_state->__pyx_n_s_add); Py_VISIT(traverse_module_state->__pyx_n_s_after); @@ -4287,6 +4326,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_n_s_fragment); Py_VISIT(traverse_module_state->__pyx_kp_u_gc); Py_VISIT(traverse_module_state->__pyx_n_s_genexpr); + Py_VISIT(traverse_module_state->__pyx_n_s_get); Py_VISIT(traverse_module_state->__pyx_n_s_getstate); Py_VISIT(traverse_module_state->__pyx_n_u_gzip); Py_VISIT(traverse_module_state->__pyx_n_s_hdrs); @@ -4369,6 +4409,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_n_s_val); Py_VISIT(traverse_module_state->__pyx_n_s_version); Py_VISIT(traverse_module_state->__pyx_n_u_version); + Py_VISIT(traverse_module_state->__pyx_n_u_websocket); Py_VISIT(traverse_module_state->__pyx_n_s_yarl); Py_VISIT(traverse_module_state->__pyx_int_1); Py_VISIT(traverse_module_state->__pyx_int_4084195); @@ -4378,33 +4419,34 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(traverse_module_state->__pyx_int_213037754); Py_VISIT(traverse_module_state->__pyx_int_257960607); Py_VISIT(traverse_module_state->__pyx_tuple__5); - Py_VISIT(traverse_module_state->__pyx_tuple__11); - Py_VISIT(traverse_module_state->__pyx_tuple__13); + Py_VISIT(traverse_module_state->__pyx_tuple__7); + Py_VISIT(traverse_module_state->__pyx_tuple__12); Py_VISIT(traverse_module_state->__pyx_tuple__14); Py_VISIT(traverse_module_state->__pyx_tuple__15); - Py_VISIT(traverse_module_state->__pyx_tuple__17); - Py_VISIT(traverse_module_state->__pyx_tuple__19); - Py_VISIT(traverse_module_state->__pyx_tuple__23); - Py_VISIT(traverse_module_state->__pyx_tuple__25); - Py_VISIT(traverse_module_state->__pyx_tuple__27); - Py_VISIT(traverse_module_state->__pyx_tuple__29); - Py_VISIT(traverse_module_state->__pyx_tuple__36); - Py_VISIT(traverse_module_state->__pyx_codeobj__16); - Py_VISIT(traverse_module_state->__pyx_codeobj__18); - Py_VISIT(traverse_module_state->__pyx_codeobj__20); + Py_VISIT(traverse_module_state->__pyx_tuple__16); + Py_VISIT(traverse_module_state->__pyx_tuple__18); + Py_VISIT(traverse_module_state->__pyx_tuple__20); + Py_VISIT(traverse_module_state->__pyx_tuple__24); + Py_VISIT(traverse_module_state->__pyx_tuple__26); + Py_VISIT(traverse_module_state->__pyx_tuple__28); + Py_VISIT(traverse_module_state->__pyx_tuple__30); + Py_VISIT(traverse_module_state->__pyx_tuple__37); + Py_VISIT(traverse_module_state->__pyx_codeobj__17); + Py_VISIT(traverse_module_state->__pyx_codeobj__19); Py_VISIT(traverse_module_state->__pyx_codeobj__21); Py_VISIT(traverse_module_state->__pyx_codeobj__22); - Py_VISIT(traverse_module_state->__pyx_codeobj__24); - Py_VISIT(traverse_module_state->__pyx_codeobj__26); - Py_VISIT(traverse_module_state->__pyx_codeobj__28); - Py_VISIT(traverse_module_state->__pyx_codeobj__30); + Py_VISIT(traverse_module_state->__pyx_codeobj__23); + Py_VISIT(traverse_module_state->__pyx_codeobj__25); + Py_VISIT(traverse_module_state->__pyx_codeobj__27); + Py_VISIT(traverse_module_state->__pyx_codeobj__29); Py_VISIT(traverse_module_state->__pyx_codeobj__31); Py_VISIT(traverse_module_state->__pyx_codeobj__32); Py_VISIT(traverse_module_state->__pyx_codeobj__33); Py_VISIT(traverse_module_state->__pyx_codeobj__34); 
Py_VISIT(traverse_module_state->__pyx_codeobj__35); - Py_VISIT(traverse_module_state->__pyx_codeobj__37); + Py_VISIT(traverse_module_state->__pyx_codeobj__36); Py_VISIT(traverse_module_state->__pyx_codeobj__38); + Py_VISIT(traverse_module_state->__pyx_codeobj__39); return 0; } #endif @@ -4533,6 +4575,8 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #if CYTHON_USE_MODULE_STATE #endif #if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE #define __pyx_type_7aiohttp_12_http_parser_RawRequestMessage __pyx_mstate_global->__pyx_type_7aiohttp_12_http_parser_RawRequestMessage #define __pyx_type_7aiohttp_12_http_parser_RawResponseMessage __pyx_mstate_global->__pyx_type_7aiohttp_12_http_parser_RawResponseMessage #define __pyx_type_7aiohttp_12_http_parser_HttpParser __pyx_mstate_global->__pyx_type_7aiohttp_12_http_parser_HttpParser @@ -4564,6 +4608,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD __pyx_mstate_global->__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD #define __pyx_n_s_AGE __pyx_mstate_global->__pyx_n_s_AGE #define __pyx_n_s_ALLOW __pyx_mstate_global->__pyx_n_s_ALLOW +#define __pyx_n_s_ALLOWED_UPGRADES __pyx_mstate_global->__pyx_n_s_ALLOWED_UPGRADES #define __pyx_n_s_AUTHORIZATION __pyx_mstate_global->__pyx_n_s_AUTHORIZATION #define __pyx_n_s_BadHttpMessage __pyx_mstate_global->__pyx_n_s_BadHttpMessage #define __pyx_n_s_BadStatusLine __pyx_mstate_global->__pyx_n_s_BadStatusLine @@ -4688,17 +4733,17 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_n_s_X_FORWARDED_FOR __pyx_mstate_global->__pyx_n_s_X_FORWARDED_FOR #define __pyx_n_s_X_FORWARDED_HOST __pyx_mstate_global->__pyx_n_s_X_FORWARDED_HOST #define __pyx_n_s_X_FORWARDED_PROTO __pyx_mstate_global->__pyx_n_s_X_FORWARDED_PROTO +#define __pyx_n_s__10 __pyx_mstate_global->__pyx_n_s__10 #define __pyx_kp_u__10 __pyx_mstate_global->__pyx_kp_u__10 -#define __pyx_kp_u__12 __pyx_mstate_global->__pyx_kp_u__12 +#define __pyx_kp_u__11 __pyx_mstate_global->__pyx_kp_u__11 +#define __pyx_kp_u__13 __pyx_mstate_global->__pyx_kp_u__13 #define __pyx_kp_u__2 __pyx_mstate_global->__pyx_kp_u__2 #define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 +#define __pyx_n_s__4 __pyx_mstate_global->__pyx_n_s__4 #define __pyx_kp_b__4 __pyx_mstate_global->__pyx_kp_b__4 -#define __pyx_kp_u__6 __pyx_mstate_global->__pyx_kp_u__6 -#define __pyx_kp_u__7 __pyx_mstate_global->__pyx_kp_u__7 -#define __pyx_n_s__8 __pyx_mstate_global->__pyx_n_s__8 -#define __pyx_kp_b__8 __pyx_mstate_global->__pyx_kp_b__8 +#define __pyx_kp_u__4 __pyx_mstate_global->__pyx_kp_u__4 +#define __pyx_kp_b__6 __pyx_mstate_global->__pyx_kp_b__6 #define __pyx_kp_u__8 __pyx_mstate_global->__pyx_kp_u__8 -#define __pyx_n_s__9 __pyx_mstate_global->__pyx_n_s__9 #define __pyx_kp_u__9 __pyx_mstate_global->__pyx_kp_u__9 #define __pyx_n_s_add __pyx_mstate_global->__pyx_n_s_add #define __pyx_n_s_after __pyx_mstate_global->__pyx_n_s_after @@ -4744,6 +4789,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_n_s_fragment __pyx_mstate_global->__pyx_n_s_fragment #define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc #define __pyx_n_s_genexpr __pyx_mstate_global->__pyx_n_s_genexpr +#define __pyx_n_s_get __pyx_mstate_global->__pyx_n_s_get #define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate #define __pyx_n_u_gzip __pyx_mstate_global->__pyx_n_u_gzip #define __pyx_n_s_hdrs __pyx_mstate_global->__pyx_n_s_hdrs @@ -4826,6 +4872,7 
@@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_n_s_val __pyx_mstate_global->__pyx_n_s_val #define __pyx_n_s_version __pyx_mstate_global->__pyx_n_s_version #define __pyx_n_u_version __pyx_mstate_global->__pyx_n_u_version +#define __pyx_n_u_websocket __pyx_mstate_global->__pyx_n_u_websocket #define __pyx_n_s_yarl __pyx_mstate_global->__pyx_n_s_yarl #define __pyx_int_1 __pyx_mstate_global->__pyx_int_1 #define __pyx_int_4084195 __pyx_mstate_global->__pyx_int_4084195 @@ -4835,39 +4882,40 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #define __pyx_int_213037754 __pyx_mstate_global->__pyx_int_213037754 #define __pyx_int_257960607 __pyx_mstate_global->__pyx_int_257960607 #define __pyx_tuple__5 __pyx_mstate_global->__pyx_tuple__5 -#define __pyx_tuple__11 __pyx_mstate_global->__pyx_tuple__11 -#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 +#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 +#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 #define __pyx_tuple__14 __pyx_mstate_global->__pyx_tuple__14 #define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 -#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 -#define __pyx_tuple__19 __pyx_mstate_global->__pyx_tuple__19 -#define __pyx_tuple__23 __pyx_mstate_global->__pyx_tuple__23 -#define __pyx_tuple__25 __pyx_mstate_global->__pyx_tuple__25 -#define __pyx_tuple__27 __pyx_mstate_global->__pyx_tuple__27 -#define __pyx_tuple__29 __pyx_mstate_global->__pyx_tuple__29 -#define __pyx_tuple__36 __pyx_mstate_global->__pyx_tuple__36 -#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 -#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 -#define __pyx_codeobj__20 __pyx_mstate_global->__pyx_codeobj__20 +#define __pyx_tuple__16 __pyx_mstate_global->__pyx_tuple__16 +#define __pyx_tuple__18 __pyx_mstate_global->__pyx_tuple__18 +#define __pyx_tuple__20 __pyx_mstate_global->__pyx_tuple__20 +#define __pyx_tuple__24 __pyx_mstate_global->__pyx_tuple__24 +#define __pyx_tuple__26 __pyx_mstate_global->__pyx_tuple__26 +#define __pyx_tuple__28 __pyx_mstate_global->__pyx_tuple__28 +#define __pyx_tuple__30 __pyx_mstate_global->__pyx_tuple__30 +#define __pyx_tuple__37 __pyx_mstate_global->__pyx_tuple__37 +#define __pyx_codeobj__17 __pyx_mstate_global->__pyx_codeobj__17 +#define __pyx_codeobj__19 __pyx_mstate_global->__pyx_codeobj__19 #define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 #define __pyx_codeobj__22 __pyx_mstate_global->__pyx_codeobj__22 -#define __pyx_codeobj__24 __pyx_mstate_global->__pyx_codeobj__24 -#define __pyx_codeobj__26 __pyx_mstate_global->__pyx_codeobj__26 -#define __pyx_codeobj__28 __pyx_mstate_global->__pyx_codeobj__28 -#define __pyx_codeobj__30 __pyx_mstate_global->__pyx_codeobj__30 +#define __pyx_codeobj__23 __pyx_mstate_global->__pyx_codeobj__23 +#define __pyx_codeobj__25 __pyx_mstate_global->__pyx_codeobj__25 +#define __pyx_codeobj__27 __pyx_mstate_global->__pyx_codeobj__27 +#define __pyx_codeobj__29 __pyx_mstate_global->__pyx_codeobj__29 #define __pyx_codeobj__31 __pyx_mstate_global->__pyx_codeobj__31 #define __pyx_codeobj__32 __pyx_mstate_global->__pyx_codeobj__32 #define __pyx_codeobj__33 __pyx_mstate_global->__pyx_codeobj__33 #define __pyx_codeobj__34 __pyx_mstate_global->__pyx_codeobj__34 #define __pyx_codeobj__35 __pyx_mstate_global->__pyx_codeobj__35 -#define __pyx_codeobj__37 __pyx_mstate_global->__pyx_codeobj__37 +#define __pyx_codeobj__36 __pyx_mstate_global->__pyx_codeobj__36 #define 
__pyx_codeobj__38 __pyx_mstate_global->__pyx_codeobj__38 +#define __pyx_codeobj__39 __pyx_mstate_global->__pyx_codeobj__39 /* #### Code section: module_code ### */ /* "cpython/complex.pxd":19 * * @property - * cdef inline double real(self): # <<<<<<<<<<<<<< + * cdef inline double real(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.real * */ @@ -4877,7 +4925,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComp /* "cpython/complex.pxd":20 * @property - * cdef inline double real(self): + * cdef inline double real(self) noexcept: * return self.cval.real # <<<<<<<<<<<<<< * * @property @@ -4888,7 +4936,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComp /* "cpython/complex.pxd":19 * * @property - * cdef inline double real(self): # <<<<<<<<<<<<<< + * cdef inline double real(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.real * */ @@ -4901,7 +4949,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComp /* "cpython/complex.pxd":23 * * @property - * cdef inline double imag(self): # <<<<<<<<<<<<<< + * cdef inline double imag(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.imag * */ @@ -4911,7 +4959,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComp /* "cpython/complex.pxd":24 * @property - * cdef inline double imag(self): + * cdef inline double imag(self) noexcept: * return self.cval.imag # <<<<<<<<<<<<<< * * # PyTypeObject PyComplex_Type @@ -4922,7 +4970,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComp /* "cpython/complex.pxd":23 * * @property - * cdef inline double imag(self): # <<<<<<<<<<<<<< + * cdef inline double imag(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.imag * */ @@ -5157,7 +5205,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7cpython_11contextvars_get_value_no_defau return __pyx_r; } -/* "aiohttp/_http_parser.pyx":74 +/* "aiohttp/_http_parser.pyx":75 * * * cdef inline object extend(object buf, const char* at, size_t length): # <<<<<<<<<<<<<< @@ -5177,26 +5225,26 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_extend(PyObject * int __pyx_clineno = 0; __Pyx_RefNannySetupContext("extend", 1); - /* "aiohttp/_http_parser.pyx":77 + /* "aiohttp/_http_parser.pyx":78 * cdef Py_ssize_t s * cdef char* ptr * s = PyByteArray_Size(buf) # <<<<<<<<<<<<<< * PyByteArray_Resize(buf, s + length) * ptr = PyByteArray_AsString(buf) */ - __pyx_t_1 = PyByteArray_Size(__pyx_v_buf); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 77, __pyx_L1_error) + __pyx_t_1 = PyByteArray_Size(__pyx_v_buf); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 78, __pyx_L1_error) __pyx_v_s = __pyx_t_1; - /* "aiohttp/_http_parser.pyx":78 + /* "aiohttp/_http_parser.pyx":79 * cdef char* ptr * s = PyByteArray_Size(buf) * PyByteArray_Resize(buf, s + length) # <<<<<<<<<<<<<< * ptr = PyByteArray_AsString(buf) * memcpy(ptr + s, at, length) */ - __pyx_t_2 = PyByteArray_Resize(__pyx_v_buf, (__pyx_v_s + __pyx_v_length)); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 78, __pyx_L1_error) + __pyx_t_2 = PyByteArray_Resize(__pyx_v_buf, (__pyx_v_s + __pyx_v_length)); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 79, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":79 + /* "aiohttp/_http_parser.pyx":80 * s = PyByteArray_Size(buf) * PyByteArray_Resize(buf, s + length) * ptr = PyByteArray_AsString(buf) # <<<<<<<<<<<<<< @@ -5205,7 +5253,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_extend(PyObject * 
*/ __pyx_v_ptr = PyByteArray_AsString(__pyx_v_buf); - /* "aiohttp/_http_parser.pyx":80 + /* "aiohttp/_http_parser.pyx":81 * PyByteArray_Resize(buf, s + length) * ptr = PyByteArray_AsString(buf) * memcpy(ptr + s, at, length) # <<<<<<<<<<<<<< @@ -5214,7 +5262,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_extend(PyObject * */ (void)(memcpy((__pyx_v_ptr + __pyx_v_s), __pyx_v_at, __pyx_v_length)); - /* "aiohttp/_http_parser.pyx":74 + /* "aiohttp/_http_parser.pyx":75 * * * cdef inline object extend(object buf, const char* at, size_t length): # <<<<<<<<<<<<<< @@ -5234,7 +5282,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_extend(PyObject * return __pyx_r; } -/* "aiohttp/_http_parser.pyx":92 +/* "aiohttp/_http_parser.pyx":93 * * * cdef inline str http_method_str(int i): # <<<<<<<<<<<<<< @@ -5252,7 +5300,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(i int __pyx_clineno = 0; __Pyx_RefNannySetupContext("http_method_str", 1); - /* "aiohttp/_http_parser.pyx":93 + /* "aiohttp/_http_parser.pyx":94 * * cdef inline str http_method_str(int i): * if i < METHODS_COUNT: # <<<<<<<<<<<<<< @@ -5262,7 +5310,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(i __pyx_t_1 = (__pyx_v_i < 46); if (__pyx_t_1) { - /* "aiohttp/_http_parser.pyx":94 + /* "aiohttp/_http_parser.pyx":95 * cdef inline str http_method_str(int i): * if i < METHODS_COUNT: * return _http_method[i] # <<<<<<<<<<<<<< @@ -5272,16 +5320,16 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(i __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_7aiohttp_12_http_parser__http_method == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 94, __pyx_L1_error) + __PYX_ERR(0, 95, __pyx_L1_error) } - __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 94, __pyx_L1_error) + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 95, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(((PyObject*)__pyx_t_2)); __pyx_r = ((PyObject*)__pyx_t_2); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":93 + /* "aiohttp/_http_parser.pyx":94 * * cdef inline str http_method_str(int i): * if i < METHODS_COUNT: # <<<<<<<<<<<<<< @@ -5290,7 +5338,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(i */ } - /* "aiohttp/_http_parser.pyx":96 + /* "aiohttp/_http_parser.pyx":97 * return _http_method[i] * else: * return "" # <<<<<<<<<<<<<< @@ -5304,7 +5352,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(i goto __pyx_L0; } - /* "aiohttp/_http_parser.pyx":92 + /* "aiohttp/_http_parser.pyx":93 * * * cdef inline str http_method_str(int i): # <<<<<<<<<<<<<< @@ -5323,7 +5371,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(i return __pyx_r; } -/* "aiohttp/_http_parser.pyx":98 +/* "aiohttp/_http_parser.pyx":99 * return "" * * cdef inline object find_header(bytes raw_header): # <<<<<<<<<<<<<< @@ -5345,16 +5393,16 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj int __pyx_clineno = 0; __Pyx_RefNannySetupContext("find_header", 1); - /* "aiohttp/_http_parser.pyx":102 + /* "aiohttp/_http_parser.pyx":103 * cdef 
char *buf * cdef int idx * PyBytes_AsStringAndSize(raw_header, &buf, &size) # <<<<<<<<<<<<<< * idx = _find_header.find_header(buf, size) * if idx == -1: */ - __pyx_t_1 = PyBytes_AsStringAndSize(__pyx_v_raw_header, (&__pyx_v_buf), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 102, __pyx_L1_error) + __pyx_t_1 = PyBytes_AsStringAndSize(__pyx_v_raw_header, (&__pyx_v_buf), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 103, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":103 + /* "aiohttp/_http_parser.pyx":104 * cdef int idx * PyBytes_AsStringAndSize(raw_header, &buf, &size) * idx = _find_header.find_header(buf, size) # <<<<<<<<<<<<<< @@ -5363,7 +5411,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj */ __pyx_v_idx = find_header(__pyx_v_buf, __pyx_v_size); - /* "aiohttp/_http_parser.pyx":104 + /* "aiohttp/_http_parser.pyx":105 * PyBytes_AsStringAndSize(raw_header, &buf, &size) * idx = _find_header.find_header(buf, size) * if idx == -1: # <<<<<<<<<<<<<< @@ -5373,7 +5421,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj __pyx_t_2 = (__pyx_v_idx == -1L); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":105 + /* "aiohttp/_http_parser.pyx":106 * idx = _find_header.find_header(buf, size) * if idx == -1: * return raw_header.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< @@ -5383,15 +5431,15 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_raw_header == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); - __PYX_ERR(0, 105, __pyx_L1_error) + __PYX_ERR(0, 106, __pyx_L1_error) } - __pyx_t_3 = __Pyx_decode_bytes(__pyx_v_raw_header, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 105, __pyx_L1_error) + __pyx_t_3 = __Pyx_decode_bytes(__pyx_v_raw_header, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 106, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":104 + /* "aiohttp/_http_parser.pyx":105 * PyBytes_AsStringAndSize(raw_header, &buf, &size) * idx = _find_header.find_header(buf, size) * if idx == -1: # <<<<<<<<<<<<<< @@ -5400,7 +5448,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj */ } - /* "aiohttp/_http_parser.pyx":106 + /* "aiohttp/_http_parser.pyx":107 * if idx == -1: * return raw_header.decode('utf-8', 'surrogateescape') * return headers[idx] # <<<<<<<<<<<<<< @@ -5410,15 +5458,15 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_7aiohttp_12_http_parser_headers == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 106, __pyx_L1_error) + __PYX_ERR(0, 107, __pyx_L1_error) } - __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_7aiohttp_12_http_parser_headers, __pyx_v_idx, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 106, __pyx_L1_error) + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_7aiohttp_12_http_parser_headers, __pyx_v_idx, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 107, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":98 + /* 
"aiohttp/_http_parser.pyx":99 * return "" * * cdef inline object find_header(bytes raw_header): # <<<<<<<<<<<<<< @@ -5437,7 +5485,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObj return __pyx_r; } -/* "aiohttp/_http_parser.pyx":122 +/* "aiohttp/_http_parser.pyx":123 * cdef readonly object url # yarl.URL * * def __init__(self, method, path, version, headers, raw_headers, # <<<<<<<<<<<<<< @@ -5508,7 +5556,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: @@ -5516,9 +5564,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[1]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 1); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 1); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: @@ -5526,9 +5574,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[2]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 2); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 2); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: @@ -5536,9 +5584,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[3]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 3); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 3); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: @@ -5546,9 +5594,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[4]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 4); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 4); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 5: @@ -5556,9 +5604,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[5]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 5); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 5); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 6: @@ -5566,9 +5614,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[6]); 
kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 6); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 6); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 7: @@ -5576,9 +5624,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[7]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 7); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 7); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 8: @@ -5586,9 +5634,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[8]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 8); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 8); __PYX_ERR(0, 123, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 9: @@ -5596,14 +5644,14 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[9]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 9); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 9); __PYX_ERR(0, 123, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 122, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 123, __pyx_L3_error) } } else if (unlikely(__pyx_nargs != 10)) { goto __pyx_L5_argtuple_error; @@ -5632,7 +5680,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObje } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, __pyx_nargs); __PYX_ERR(0, 122, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, __pyx_nargs); __PYX_ERR(0, 123, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -5668,14 +5716,14 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__init__", 1); - /* "aiohttp/_http_parser.pyx":124 + /* "aiohttp/_http_parser.pyx":125 * def __init__(self, method, path, version, headers, raw_headers, * should_close, compression, upgrade, chunked, url): * self.method = method # <<<<<<<<<<<<<< * self.path = path * self.version = version */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_method))||((__pyx_v_method) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_method))) __PYX_ERR(0, 124, __pyx_L1_error) + if (!(likely(PyUnicode_CheckExact(__pyx_v_method))||((__pyx_v_method) == Py_None) || 
__Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_method))) __PYX_ERR(0, 125, __pyx_L1_error) __pyx_t_1 = __pyx_v_method; __Pyx_INCREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); @@ -5684,14 +5732,14 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __pyx_v_self->method = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":125 + /* "aiohttp/_http_parser.pyx":126 * should_close, compression, upgrade, chunked, url): * self.method = method * self.path = path # <<<<<<<<<<<<<< * self.version = version * self.headers = headers */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_path))||((__pyx_v_path) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_path))) __PYX_ERR(0, 125, __pyx_L1_error) + if (!(likely(PyUnicode_CheckExact(__pyx_v_path))||((__pyx_v_path) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_path))) __PYX_ERR(0, 126, __pyx_L1_error) __pyx_t_1 = __pyx_v_path; __Pyx_INCREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); @@ -5700,7 +5748,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __pyx_v_self->path = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":126 + /* "aiohttp/_http_parser.pyx":127 * self.method = method * self.path = path * self.version = version # <<<<<<<<<<<<<< @@ -5713,7 +5761,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->version); __pyx_v_self->version = __pyx_v_version; - /* "aiohttp/_http_parser.pyx":127 + /* "aiohttp/_http_parser.pyx":128 * self.path = path * self.version = version * self.headers = headers # <<<<<<<<<<<<<< @@ -5726,7 +5774,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->headers); __pyx_v_self->headers = __pyx_v_headers; - /* "aiohttp/_http_parser.pyx":128 + /* "aiohttp/_http_parser.pyx":129 * self.version = version * self.headers = headers * self.raw_headers = raw_headers # <<<<<<<<<<<<<< @@ -5739,7 +5787,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->raw_headers); __pyx_v_self->raw_headers = __pyx_v_raw_headers; - /* "aiohttp/_http_parser.pyx":129 + /* "aiohttp/_http_parser.pyx":130 * self.headers = headers * self.raw_headers = raw_headers * self.should_close = should_close # <<<<<<<<<<<<<< @@ -5752,7 +5800,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->should_close); __pyx_v_self->should_close = __pyx_v_should_close; - /* "aiohttp/_http_parser.pyx":130 + /* "aiohttp/_http_parser.pyx":131 * self.raw_headers = raw_headers * self.should_close = should_close * self.compression = compression # <<<<<<<<<<<<<< @@ -5765,7 +5813,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->compression); __pyx_v_self->compression = __pyx_v_compression; - /* "aiohttp/_http_parser.pyx":131 + /* "aiohttp/_http_parser.pyx":132 * self.should_close = should_close * self.compression = compression * self.upgrade = upgrade # <<<<<<<<<<<<<< @@ -5778,7 +5826,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->upgrade); __pyx_v_self->upgrade = __pyx_v_upgrade; - /* "aiohttp/_http_parser.pyx":132 + /* "aiohttp/_http_parser.pyx":133 * self.compression = compression * self.upgrade = upgrade * self.chunked = chunked # <<<<<<<<<<<<<< @@ -5791,7 +5839,7 @@ static int 
__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->chunked); __pyx_v_self->chunked = __pyx_v_chunked; - /* "aiohttp/_http_parser.pyx":133 + /* "aiohttp/_http_parser.pyx":134 * self.upgrade = upgrade * self.chunked = chunked * self.url = url # <<<<<<<<<<<<<< @@ -5804,7 +5852,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __Pyx_DECREF(__pyx_v_self->url); __pyx_v_self->url = __pyx_v_url; - /* "aiohttp/_http_parser.pyx":122 + /* "aiohttp/_http_parser.pyx":123 * cdef readonly object url # yarl.URL * * def __init__(self, method, path, version, headers, raw_headers, # <<<<<<<<<<<<<< @@ -5824,7 +5872,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct return __pyx_r; } -/* "aiohttp/_http_parser.pyx":135 +/* "aiohttp/_http_parser.pyx":136 * self.url = url * * def __repr__(self): # <<<<<<<<<<<<<< @@ -5848,7 +5896,7 @@ static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__( } static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ -/* "aiohttp/_http_parser.pyx":147 +/* "aiohttp/_http_parser.pyx":148 * info.append(("chunked", self.chunked)) * info.append(("url", self.url)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< @@ -5868,7 +5916,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr *)Py_None); __Pyx_INCREF(Py_None); - __PYX_ERR(0, 147, __pyx_L1_error) + __PYX_ERR(0, 148, __pyx_L1_error) } else { __Pyx_GOTREF((PyObject *)__pyx_cur_scope); } @@ -5876,7 +5924,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ __Pyx_INCREF(__pyx_cur_scope->__pyx_genexpr_arg_0); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_genexpr_arg_0); { - __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; @@ -5915,24 +5963,24 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ return NULL; } __pyx_L3_first_run:; - if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 147, __pyx_L1_error) - __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 147, __pyx_L1_error) + if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 148, __pyx_L1_error) + __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_r); - if (unlikely(!__pyx_cur_scope->__pyx_genexpr_arg_0)) { __Pyx_RaiseUnboundLocalError(".0"); __PYX_ERR(0, 147, __pyx_L1_error) } + if (unlikely(!__pyx_cur_scope->__pyx_genexpr_arg_0)) { __Pyx_RaiseUnboundLocalError(".0"); __PYX_ERR(0, 148, __pyx_L1_error) } __pyx_t_1 = 
__pyx_cur_scope->__pyx_genexpr_arg_0; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; for (;;) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_1); #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 147, __pyx_L1_error) + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 148, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely((0 < 0))) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely((0 < 0))) __PYX_ERR(0, 148, __pyx_L1_error) #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); #endif if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { @@ -5941,7 +5989,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); - __PYX_ERR(0, 147, __pyx_L1_error) + __PYX_ERR(0, 148, __pyx_L1_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { @@ -5954,15 +6002,15 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(__pyx_t_5); #else - __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); #endif __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { Py_ssize_t index = -1; - __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_7 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); @@ -5970,7 +6018,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ __Pyx_GOTREF(__pyx_t_4); index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L6_unpacking_failed; __Pyx_GOTREF(__pyx_t_5); - if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 147, __pyx_L1_error) + if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 148, __pyx_L1_error) __pyx_t_7 = NULL; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L7_unpacking_done; @@ -5978,7 +6026,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_7 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); - __PYX_ERR(0, 147, __pyx_L1_error) + __PYX_ERR(0, 148, __pyx_L1_error) __pyx_L7_unpacking_done:; } __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_name); @@ -5989,15 +6037,15 @@ static PyObject 
*__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_val, __pyx_t_5); __Pyx_GIVEREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 147, __pyx_L1_error) + if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; @@ -6025,7 +6073,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":135 +/* "aiohttp/_http_parser.pyx":136 * self.url = url * * def __repr__(self): # <<<<<<<<<<<<<< @@ -6047,217 +6095,217 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__( int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__repr__", 1); - /* "aiohttp/_http_parser.pyx":136 + /* "aiohttp/_http_parser.pyx":137 * * def __repr__(self): * info = [] # <<<<<<<<<<<<<< * info.append(("method", self.method)) * info.append(("path", self.path)) */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 136, __pyx_L1_error) + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 137, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_info = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":137 + /* "aiohttp/_http_parser.pyx":138 * def __repr__(self): * info = [] * info.append(("method", self.method)) # <<<<<<<<<<<<<< * info.append(("path", self.path)) * info.append(("version", self.version)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 137, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 138, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_method); __Pyx_GIVEREF(__pyx_n_u_method); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_method)) __PYX_ERR(0, 137, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_method)) __PYX_ERR(0, 138, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->method); __Pyx_GIVEREF(__pyx_v_self->method); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->method)) __PYX_ERR(0, 137, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 137, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->method)) __PYX_ERR(0, 138, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 138, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":138 + /* 
"aiohttp/_http_parser.pyx":139 * info = [] * info.append(("method", self.method)) * info.append(("path", self.path)) # <<<<<<<<<<<<<< * info.append(("version", self.version)) * info.append(("headers", self.headers)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 138, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 139, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_path); __Pyx_GIVEREF(__pyx_n_u_path); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_path)) __PYX_ERR(0, 138, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_path)) __PYX_ERR(0, 139, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->path); __Pyx_GIVEREF(__pyx_v_self->path); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->path)) __PYX_ERR(0, 138, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 138, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->path)) __PYX_ERR(0, 139, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 139, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":139 + /* "aiohttp/_http_parser.pyx":140 * info.append(("method", self.method)) * info.append(("path", self.path)) * info.append(("version", self.version)) # <<<<<<<<<<<<<< * info.append(("headers", self.headers)) * info.append(("raw_headers", self.raw_headers)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 139, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 140, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_version); __Pyx_GIVEREF(__pyx_n_u_version); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version)) __PYX_ERR(0, 139, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version)) __PYX_ERR(0, 140, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->version); __Pyx_GIVEREF(__pyx_v_self->version); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version)) __PYX_ERR(0, 139, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 139, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version)) __PYX_ERR(0, 140, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 140, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":140 + /* "aiohttp/_http_parser.pyx":141 * info.append(("path", self.path)) * info.append(("version", self.version)) * info.append(("headers", self.headers)) # <<<<<<<<<<<<<< * info.append(("raw_headers", self.raw_headers)) * info.append(("should_close", self.should_close)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 140, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_headers); __Pyx_GIVEREF(__pyx_n_u_headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_headers)) __PYX_ERR(0, 140, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_headers)) __PYX_ERR(0, 141, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->headers); __Pyx_GIVEREF(__pyx_v_self->headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->headers)) __PYX_ERR(0, 140, __pyx_L1_error); - 
__pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 140, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->headers)) __PYX_ERR(0, 141, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 141, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":141 + /* "aiohttp/_http_parser.pyx":142 * info.append(("version", self.version)) * info.append(("headers", self.headers)) * info.append(("raw_headers", self.raw_headers)) # <<<<<<<<<<<<<< * info.append(("should_close", self.should_close)) * info.append(("compression", self.compression)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 142, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_raw_headers); __Pyx_GIVEREF(__pyx_n_u_raw_headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_raw_headers)) __PYX_ERR(0, 141, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_raw_headers)) __PYX_ERR(0, 142, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->raw_headers); __Pyx_GIVEREF(__pyx_v_self->raw_headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->raw_headers)) __PYX_ERR(0, 141, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 141, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->raw_headers)) __PYX_ERR(0, 142, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 142, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":142 + /* "aiohttp/_http_parser.pyx":143 * info.append(("headers", self.headers)) * info.append(("raw_headers", self.raw_headers)) * info.append(("should_close", self.should_close)) # <<<<<<<<<<<<<< * info.append(("compression", self.compression)) * info.append(("upgrade", self.upgrade)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 142, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 143, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_should_close); __Pyx_GIVEREF(__pyx_n_u_should_close); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_should_close)) __PYX_ERR(0, 142, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_should_close)) __PYX_ERR(0, 143, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->should_close); __Pyx_GIVEREF(__pyx_v_self->should_close); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->should_close)) __PYX_ERR(0, 142, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 142, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->should_close)) __PYX_ERR(0, 143, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 143, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":143 + /* "aiohttp/_http_parser.pyx":144 * info.append(("raw_headers", self.raw_headers)) * info.append(("should_close", self.should_close)) * info.append(("compression", self.compression)) # <<<<<<<<<<<<<< * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) */ - 
__pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 143, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 144, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_compression); __Pyx_GIVEREF(__pyx_n_u_compression); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_compression)) __PYX_ERR(0, 143, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_compression)) __PYX_ERR(0, 144, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->compression); __Pyx_GIVEREF(__pyx_v_self->compression); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->compression)) __PYX_ERR(0, 143, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 143, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->compression)) __PYX_ERR(0, 144, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 144, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":144 + /* "aiohttp/_http_parser.pyx":145 * info.append(("should_close", self.should_close)) * info.append(("compression", self.compression)) * info.append(("upgrade", self.upgrade)) # <<<<<<<<<<<<<< * info.append(("chunked", self.chunked)) * info.append(("url", self.url)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 144, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 145, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_upgrade); __Pyx_GIVEREF(__pyx_n_u_upgrade); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_upgrade)) __PYX_ERR(0, 144, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_upgrade)) __PYX_ERR(0, 145, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->upgrade); __Pyx_GIVEREF(__pyx_v_self->upgrade); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->upgrade)) __PYX_ERR(0, 144, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 144, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->upgrade)) __PYX_ERR(0, 145, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 145, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":145 + /* "aiohttp/_http_parser.pyx":146 * info.append(("compression", self.compression)) * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) # <<<<<<<<<<<<<< * info.append(("url", self.url)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 145, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_chunked); __Pyx_GIVEREF(__pyx_n_u_chunked); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_chunked)) __PYX_ERR(0, 145, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_chunked)) __PYX_ERR(0, 146, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->chunked); __Pyx_GIVEREF(__pyx_v_self->chunked); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->chunked)) __PYX_ERR(0, 145, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 145, __pyx_L1_error) + if 
(__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->chunked)) __PYX_ERR(0, 146, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 146, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":146 + /* "aiohttp/_http_parser.pyx":147 * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) * info.append(("url", self.url)) # <<<<<<<<<<<<<< * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) * return '' */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 147, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_url); __Pyx_GIVEREF(__pyx_n_u_url); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_url)) __PYX_ERR(0, 146, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_url)) __PYX_ERR(0, 147, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->url); __Pyx_GIVEREF(__pyx_v_self->url); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->url)) __PYX_ERR(0, 146, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 146, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->url)) __PYX_ERR(0, 147, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 147, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":147 + /* "aiohttp/_http_parser.pyx":148 * info.append(("chunked", self.chunked)) * info.append(("url", self.url)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< * return '' * */ - __pyx_t_1 = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___genexpr(NULL, __pyx_v_info); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_1 = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___genexpr(NULL, __pyx_v_info); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_Generator_Next(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_3 = __Pyx_Generator_Next(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_1 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_sinfo = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":148 + /* "aiohttp/_http_parser.pyx":149 * info.append(("url", self.url)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) * return '' # <<<<<<<<<<<<<< @@ -6265,16 +6313,16 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__( * def _replace(self, **dct): */ __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawRequestMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawRequestMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 149, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_1, 
__pyx_kp_u__3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_1, __pyx_kp_u__3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 149, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":135 + /* "aiohttp/_http_parser.pyx":136 * self.url = url * * def __repr__(self): # <<<<<<<<<<<<<< @@ -6297,7 +6345,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__( return __pyx_r; } -/* "aiohttp/_http_parser.pyx":150 +/* "aiohttp/_http_parser.pyx":151 * return '' * * def _replace(self, **dct): # <<<<<<<<<<<<<< @@ -6377,7 +6425,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_replace", 1); - /* "aiohttp/_http_parser.pyx":152 + /* "aiohttp/_http_parser.pyx":153 * def _replace(self, **dct): * cdef RawRequestMessage ret * ret = _new_request_message(self.method, # <<<<<<<<<<<<<< @@ -6387,7 +6435,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_1 = __pyx_v_self->method; __Pyx_INCREF(__pyx_t_1); - /* "aiohttp/_http_parser.pyx":153 + /* "aiohttp/_http_parser.pyx":154 * cdef RawRequestMessage ret * ret = _new_request_message(self.method, * self.path, # <<<<<<<<<<<<<< @@ -6397,7 +6445,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_2 = __pyx_v_self->path; __Pyx_INCREF(__pyx_t_2); - /* "aiohttp/_http_parser.pyx":154 + /* "aiohttp/_http_parser.pyx":155 * ret = _new_request_message(self.method, * self.path, * self.version, # <<<<<<<<<<<<<< @@ -6407,7 +6455,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_3 = __pyx_v_self->version; __Pyx_INCREF(__pyx_t_3); - /* "aiohttp/_http_parser.pyx":155 + /* "aiohttp/_http_parser.pyx":156 * self.path, * self.version, * self.headers, # <<<<<<<<<<<<<< @@ -6417,7 +6465,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_4 = __pyx_v_self->headers; __Pyx_INCREF(__pyx_t_4); - /* "aiohttp/_http_parser.pyx":156 + /* "aiohttp/_http_parser.pyx":157 * self.version, * self.headers, * self.raw_headers, # <<<<<<<<<<<<<< @@ -6427,16 +6475,16 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_5 = __pyx_v_self->raw_headers; __Pyx_INCREF(__pyx_t_5); - /* "aiohttp/_http_parser.pyx":157 + /* "aiohttp/_http_parser.pyx":158 * self.headers, * self.raw_headers, * self.should_close, # <<<<<<<<<<<<<< * self.compression, * self.upgrade, */ - __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_self->should_close); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 157, __pyx_L1_error) + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_self->should_close); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 158, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":158 + /* "aiohttp/_http_parser.pyx":159 * self.raw_headers, * self.should_close, * self.compression, # <<<<<<<<<<<<<< @@ -6446,25 +6494,25 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_7 = __pyx_v_self->compression; __Pyx_INCREF(__pyx_t_7); - /* "aiohttp/_http_parser.pyx":159 + /* "aiohttp/_http_parser.pyx":160 * self.should_close, * self.compression, * self.upgrade, # <<<<<<<<<<<<<< * self.chunked, * self.url) */ - __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_self->upgrade); if 
(unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 159, __pyx_L1_error) + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_self->upgrade); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 160, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":160 + /* "aiohttp/_http_parser.pyx":161 * self.compression, * self.upgrade, * self.chunked, # <<<<<<<<<<<<<< * self.url) * if "method" in dct: */ - __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_self->chunked); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 160, __pyx_L1_error) + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_self->chunked); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 161, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":161 + /* "aiohttp/_http_parser.pyx":162 * self.upgrade, * self.chunked, * self.url) # <<<<<<<<<<<<<< @@ -6474,14 +6522,14 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_t_10 = __pyx_v_self->url; __Pyx_INCREF(__pyx_t_10); - /* "aiohttp/_http_parser.pyx":152 + /* "aiohttp/_http_parser.pyx":153 * def _replace(self, **dct): * cdef RawRequestMessage ret * ret = _new_request_message(self.method, # <<<<<<<<<<<<<< * self.path, * self.version, */ - __pyx_t_11 = __pyx_f_7aiohttp_12_http_parser__new_request_message(((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_2), __pyx_t_3, __pyx_t_4, __pyx_t_5, __pyx_t_6, __pyx_t_7, __pyx_t_8, __pyx_t_9, __pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 152, __pyx_L1_error) + __pyx_t_11 = __pyx_f_7aiohttp_12_http_parser__new_request_message(((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_2), __pyx_t_3, __pyx_t_4, __pyx_t_5, __pyx_t_6, __pyx_t_7, __pyx_t_8, __pyx_t_9, __pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 153, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; @@ -6490,37 +6538,37 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage))))) __PYX_ERR(0, 152, __pyx_L1_error) + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage))))) __PYX_ERR(0, 153, __pyx_L1_error) __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_t_11); __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":162 + /* "aiohttp/_http_parser.pyx":163 * self.chunked, * self.url) * if "method" in dct: # <<<<<<<<<<<<<< * ret.method = dct["method"] * if "path" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_method, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 162, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_method, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 163, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":163 + /* "aiohttp/_http_parser.pyx":164 * self.url) * if "method" in dct: * ret.method = dct["method"] # <<<<<<<<<<<<<< * if "path" in dct: * ret.path = dct["path"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_method); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 163, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_method); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 164, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); - if 
(!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_11))) __PYX_ERR(0, 163, __pyx_L1_error) + if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_11))) __PYX_ERR(0, 164, __pyx_L1_error) __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->method); __Pyx_DECREF(__pyx_v_ret->method); __pyx_v_ret->method = ((PyObject*)__pyx_t_11); __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":162 + /* "aiohttp/_http_parser.pyx":163 * self.chunked, * self.url) * if "method" in dct: # <<<<<<<<<<<<<< @@ -6529,33 +6577,33 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":164 + /* "aiohttp/_http_parser.pyx":165 * if "method" in dct: * ret.method = dct["method"] * if "path" in dct: # <<<<<<<<<<<<<< * ret.path = dct["path"] * if "version" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_path, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 164, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_path, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 165, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":165 + /* "aiohttp/_http_parser.pyx":166 * ret.method = dct["method"] * if "path" in dct: * ret.path = dct["path"] # <<<<<<<<<<<<<< * if "version" in dct: * ret.version = dct["version"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_path); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 165, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_path); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 166, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); - if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_11))) __PYX_ERR(0, 165, __pyx_L1_error) + if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_11))) __PYX_ERR(0, 166, __pyx_L1_error) __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->path); __Pyx_DECREF(__pyx_v_ret->path); __pyx_v_ret->path = ((PyObject*)__pyx_t_11); __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":164 + /* "aiohttp/_http_parser.pyx":165 * if "method" in dct: * ret.method = dct["method"] * if "path" in dct: # <<<<<<<<<<<<<< @@ -6564,24 +6612,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":166 + /* "aiohttp/_http_parser.pyx":167 * if "path" in dct: * ret.path = dct["path"] * if "version" in dct: # <<<<<<<<<<<<<< * ret.version = dct["version"] * if "headers" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_version, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 166, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_version, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 167, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":167 + /* "aiohttp/_http_parser.pyx":168 * ret.path = dct["path"] * if "version" in dct: * ret.version = dct["version"] # <<<<<<<<<<<<<< * if "headers" in dct: * ret.headers = dct["headers"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_version); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 167, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_version); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 168, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); 
__Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->version); @@ -6589,7 +6637,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->version = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":166 + /* "aiohttp/_http_parser.pyx":167 * if "path" in dct: * ret.path = dct["path"] * if "version" in dct: # <<<<<<<<<<<<<< @@ -6598,24 +6646,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":168 + /* "aiohttp/_http_parser.pyx":169 * if "version" in dct: * ret.version = dct["version"] * if "headers" in dct: # <<<<<<<<<<<<<< * ret.headers = dct["headers"] * if "raw_headers" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_headers, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 168, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_headers, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 169, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":169 + /* "aiohttp/_http_parser.pyx":170 * ret.version = dct["version"] * if "headers" in dct: * ret.headers = dct["headers"] # <<<<<<<<<<<<<< * if "raw_headers" in dct: * ret.raw_headers = dct["raw_headers"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 169, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 170, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->headers); @@ -6623,7 +6671,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->headers = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":168 + /* "aiohttp/_http_parser.pyx":169 * if "version" in dct: * ret.version = dct["version"] * if "headers" in dct: # <<<<<<<<<<<<<< @@ -6632,24 +6680,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":170 + /* "aiohttp/_http_parser.pyx":171 * if "headers" in dct: * ret.headers = dct["headers"] * if "raw_headers" in dct: # <<<<<<<<<<<<<< * ret.raw_headers = dct["raw_headers"] * if "should_close" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_raw_headers, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 170, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_raw_headers, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 171, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":171 + /* "aiohttp/_http_parser.pyx":172 * ret.headers = dct["headers"] * if "raw_headers" in dct: * ret.raw_headers = dct["raw_headers"] # <<<<<<<<<<<<<< * if "should_close" in dct: * ret.should_close = dct["should_close"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_raw_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 171, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_raw_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 172, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->raw_headers); @@ -6657,7 +6705,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->raw_headers = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":170 + /* "aiohttp/_http_parser.pyx":171 * if "headers" in dct: * ret.headers = dct["headers"] * if "raw_headers" in dct: # <<<<<<<<<<<<<< @@ -6666,24 
+6714,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":172 + /* "aiohttp/_http_parser.pyx":173 * if "raw_headers" in dct: * ret.raw_headers = dct["raw_headers"] * if "should_close" in dct: # <<<<<<<<<<<<<< * ret.should_close = dct["should_close"] * if "compression" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_should_close, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 172, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_should_close, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 173, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":173 + /* "aiohttp/_http_parser.pyx":174 * ret.raw_headers = dct["raw_headers"] * if "should_close" in dct: * ret.should_close = dct["should_close"] # <<<<<<<<<<<<<< * if "compression" in dct: * ret.compression = dct["compression"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_should_close); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 173, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_should_close); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 174, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->should_close); @@ -6691,7 +6739,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->should_close = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":172 + /* "aiohttp/_http_parser.pyx":173 * if "raw_headers" in dct: * ret.raw_headers = dct["raw_headers"] * if "should_close" in dct: # <<<<<<<<<<<<<< @@ -6700,24 +6748,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":174 + /* "aiohttp/_http_parser.pyx":175 * if "should_close" in dct: * ret.should_close = dct["should_close"] * if "compression" in dct: # <<<<<<<<<<<<<< * ret.compression = dct["compression"] * if "upgrade" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_compression, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 174, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_compression, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 175, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":175 + /* "aiohttp/_http_parser.pyx":176 * ret.should_close = dct["should_close"] * if "compression" in dct: * ret.compression = dct["compression"] # <<<<<<<<<<<<<< * if "upgrade" in dct: * ret.upgrade = dct["upgrade"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_compression); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 175, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_compression); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 176, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->compression); @@ -6725,7 +6773,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->compression = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":174 + /* "aiohttp/_http_parser.pyx":175 * if "should_close" in dct: * ret.should_close = dct["should_close"] * if "compression" in dct: # <<<<<<<<<<<<<< @@ -6734,24 +6782,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":176 + /* "aiohttp/_http_parser.pyx":177 * if "compression" in dct: * ret.compression = dct["compression"] * if "upgrade" in dct: # 
<<<<<<<<<<<<<< * ret.upgrade = dct["upgrade"] * if "chunked" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_upgrade, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 176, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_upgrade, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 177, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":177 + /* "aiohttp/_http_parser.pyx":178 * ret.compression = dct["compression"] * if "upgrade" in dct: * ret.upgrade = dct["upgrade"] # <<<<<<<<<<<<<< * if "chunked" in dct: * ret.chunked = dct["chunked"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_upgrade); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 177, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_upgrade); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 178, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->upgrade); @@ -6759,7 +6807,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->upgrade = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":176 + /* "aiohttp/_http_parser.pyx":177 * if "compression" in dct: * ret.compression = dct["compression"] * if "upgrade" in dct: # <<<<<<<<<<<<<< @@ -6768,24 +6816,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":178 + /* "aiohttp/_http_parser.pyx":179 * if "upgrade" in dct: * ret.upgrade = dct["upgrade"] * if "chunked" in dct: # <<<<<<<<<<<<<< * ret.chunked = dct["chunked"] * if "url" in dct: */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_chunked, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 178, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_chunked, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 179, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":179 + /* "aiohttp/_http_parser.pyx":180 * ret.upgrade = dct["upgrade"] * if "chunked" in dct: * ret.chunked = dct["chunked"] # <<<<<<<<<<<<<< * if "url" in dct: * ret.url = dct["url"] */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_chunked); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 179, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_chunked); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 180, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->chunked); @@ -6793,7 +6841,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->chunked = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":178 + /* "aiohttp/_http_parser.pyx":179 * if "upgrade" in dct: * ret.upgrade = dct["upgrade"] * if "chunked" in dct: # <<<<<<<<<<<<<< @@ -6802,24 +6850,24 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":180 + /* "aiohttp/_http_parser.pyx":181 * if "chunked" in dct: * ret.chunked = dct["chunked"] * if "url" in dct: # <<<<<<<<<<<<<< * ret.url = dct["url"] * return ret */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_url, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 180, __pyx_L1_error) + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_url, __pyx_v_dct, Py_EQ)); if (unlikely((__pyx_t_9 < 0))) __PYX_ERR(0, 181, __pyx_L1_error) if (__pyx_t_9) { - /* "aiohttp/_http_parser.pyx":181 + /* "aiohttp/_http_parser.pyx":182 * ret.chunked = dct["chunked"] * if "url" in dct: 
* ret.url = dct["url"] # <<<<<<<<<<<<<< * return ret * */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_url); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 181, __pyx_L1_error) + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_url); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); __Pyx_GOTREF(__pyx_v_ret->url); @@ -6827,7 +6875,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_v_ret->url = __pyx_t_11; __pyx_t_11 = 0; - /* "aiohttp/_http_parser.pyx":180 + /* "aiohttp/_http_parser.pyx":181 * if "chunked" in dct: * ret.chunked = dct["chunked"] * if "url" in dct: # <<<<<<<<<<<<<< @@ -6836,7 +6884,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( */ } - /* "aiohttp/_http_parser.pyx":182 + /* "aiohttp/_http_parser.pyx":183 * if "url" in dct: * ret.url = dct["url"] * return ret # <<<<<<<<<<<<<< @@ -6848,7 +6896,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( __pyx_r = ((PyObject *)__pyx_v_ret); goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":150 + /* "aiohttp/_http_parser.pyx":151 * return '' * * def _replace(self, **dct): # <<<<<<<<<<<<<< @@ -6875,7 +6923,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace( return __pyx_r; } -/* "aiohttp/_http_parser.pyx":111 +/* "aiohttp/_http_parser.pyx":112 * @cython.freelist(DEFAULT_FREELIST_SIZE) * cdef class RawRequestMessage: * cdef readonly str method # <<<<<<<<<<<<<< @@ -6914,7 +6962,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6method___ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":112 +/* "aiohttp/_http_parser.pyx":113 * cdef class RawRequestMessage: * cdef readonly str method * cdef readonly str path # <<<<<<<<<<<<<< @@ -6953,7 +7001,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4path___ge return __pyx_r; } -/* "aiohttp/_http_parser.pyx":113 +/* "aiohttp/_http_parser.pyx":114 * cdef readonly str method * cdef readonly str path * cdef readonly object version # HttpVersion # <<<<<<<<<<<<<< @@ -6992,7 +7040,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7version__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":114 +/* "aiohttp/_http_parser.pyx":115 * cdef readonly str path * cdef readonly object version # HttpVersion * cdef readonly object headers # CIMultiDict # <<<<<<<<<<<<<< @@ -7031,7 +7079,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7headers__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":115 +/* "aiohttp/_http_parser.pyx":116 * cdef readonly object version # HttpVersion * cdef readonly object headers # CIMultiDict * cdef readonly object raw_headers # tuple # <<<<<<<<<<<<<< @@ -7070,7 +7118,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11raw_head return __pyx_r; } -/* "aiohttp/_http_parser.pyx":116 +/* "aiohttp/_http_parser.pyx":117 * cdef readonly object headers # CIMultiDict * cdef readonly object raw_headers # tuple * cdef readonly object should_close # <<<<<<<<<<<<<< @@ -7109,7 +7157,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_12should_c return __pyx_r; } -/* "aiohttp/_http_parser.pyx":117 +/* "aiohttp/_http_parser.pyx":118 * cdef readonly object raw_headers # tuple * cdef readonly object should_close * cdef readonly object compression # <<<<<<<<<<<<<< @@ -7148,7 +7196,7 @@ static PyObject 
*__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11compress return __pyx_r; } -/* "aiohttp/_http_parser.pyx":118 +/* "aiohttp/_http_parser.pyx":119 * cdef readonly object should_close * cdef readonly object compression * cdef readonly object upgrade # <<<<<<<<<<<<<< @@ -7187,7 +7235,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":119 +/* "aiohttp/_http_parser.pyx":120 * cdef readonly object compression * cdef readonly object upgrade * cdef readonly object chunked # <<<<<<<<<<<<<< @@ -7226,7 +7274,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7chunked__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":120 +/* "aiohttp/_http_parser.pyx":121 * cdef readonly object upgrade * cdef readonly object chunked * cdef readonly object url # yarl.URL # <<<<<<<<<<<<<< @@ -7750,7 +7798,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__setstat return __pyx_r; } -/* "aiohttp/_http_parser.pyx":184 +/* "aiohttp/_http_parser.pyx":185 * return ret * * cdef _new_request_message(str method, # <<<<<<<<<<<<<< @@ -7768,19 +7816,19 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_new_request_message", 1); - /* "aiohttp/_http_parser.pyx":195 + /* "aiohttp/_http_parser.pyx":196 * object url): * cdef RawRequestMessage ret * ret = RawRequestMessage.__new__(RawRequestMessage) # <<<<<<<<<<<<<< * ret.method = method * ret.path = path */ - __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 195, __pyx_L1_error) + __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 196, __pyx_L1_error) __Pyx_GOTREF((PyObject *)__pyx_t_1); __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":196 + /* "aiohttp/_http_parser.pyx":197 * cdef RawRequestMessage ret * ret = RawRequestMessage.__new__(RawRequestMessage) * ret.method = method # <<<<<<<<<<<<<< @@ -7793,7 +7841,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->method); __pyx_v_ret->method = __pyx_v_method; - /* "aiohttp/_http_parser.pyx":197 + /* "aiohttp/_http_parser.pyx":198 * ret = RawRequestMessage.__new__(RawRequestMessage) * ret.method = method * ret.path = path # <<<<<<<<<<<<<< @@ -7806,7 +7854,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->path); __pyx_v_ret->path = __pyx_v_path; - /* "aiohttp/_http_parser.pyx":198 + /* "aiohttp/_http_parser.pyx":199 * ret.method = method * ret.path = path * ret.version = version # <<<<<<<<<<<<<< @@ -7819,7 +7867,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->version); __pyx_v_ret->version = __pyx_v_version; - /* "aiohttp/_http_parser.pyx":199 + /* "aiohttp/_http_parser.pyx":200 * ret.path = path * ret.version = version * ret.headers = headers # <<<<<<<<<<<<<< @@ -7832,7 +7880,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->headers); __pyx_v_ret->headers = 
__pyx_v_headers; - /* "aiohttp/_http_parser.pyx":200 + /* "aiohttp/_http_parser.pyx":201 * ret.version = version * ret.headers = headers * ret.raw_headers = raw_headers # <<<<<<<<<<<<<< @@ -7845,14 +7893,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->raw_headers); __pyx_v_ret->raw_headers = __pyx_v_raw_headers; - /* "aiohttp/_http_parser.pyx":201 + /* "aiohttp/_http_parser.pyx":202 * ret.headers = headers * ret.raw_headers = raw_headers * ret.should_close = should_close # <<<<<<<<<<<<<< * ret.compression = compression * ret.upgrade = upgrade */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 201, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 202, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_ret->should_close); @@ -7860,7 +7908,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __pyx_v_ret->should_close = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":202 + /* "aiohttp/_http_parser.pyx":203 * ret.raw_headers = raw_headers * ret.should_close = should_close * ret.compression = compression # <<<<<<<<<<<<<< @@ -7873,14 +7921,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->compression); __pyx_v_ret->compression = __pyx_v_compression; - /* "aiohttp/_http_parser.pyx":203 + /* "aiohttp/_http_parser.pyx":204 * ret.should_close = should_close * ret.compression = compression * ret.upgrade = upgrade # <<<<<<<<<<<<<< * ret.chunked = chunked * ret.url = url */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 203, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 204, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_ret->upgrade); @@ -7888,14 +7936,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __pyx_v_ret->upgrade = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":204 + /* "aiohttp/_http_parser.pyx":205 * ret.compression = compression * ret.upgrade = upgrade * ret.chunked = chunked # <<<<<<<<<<<<<< * ret.url = url * return ret */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 204, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 205, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_ret->chunked); @@ -7903,7 +7951,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __pyx_v_ret->chunked = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":205 + /* "aiohttp/_http_parser.pyx":206 * ret.upgrade = upgrade * ret.chunked = chunked * ret.url = url # <<<<<<<<<<<<<< @@ -7916,7 +7964,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __Pyx_DECREF(__pyx_v_ret->url); __pyx_v_ret->url = __pyx_v_url; - /* "aiohttp/_http_parser.pyx":206 + /* "aiohttp/_http_parser.pyx":207 * ret.chunked = chunked * ret.url = url * return ret # <<<<<<<<<<<<<< @@ -7928,7 +7976,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * __pyx_r = ((PyObject *)__pyx_v_ret); goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":184 + /* "aiohttp/_http_parser.pyx":185 * return ret * * cdef 
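
For readability, here is a plain-Python sketch of the logic the hunks above mirror: the quoted ``_http_parser.pyx`` fragments show ``RawRequestMessage._replace()`` overwriting only the fields present in ``dct`` and the ``_new_request_message()`` factory assigning every field directly; only the ``.pyx`` line-number annotations change in this diff. The class below is a hypothetical stand-in for the real compiled ``cdef class``, not the shipped implementation::

    from dataclasses import dataclass, fields, replace
    from typing import Any

    # Hypothetical pure-Python stand-in for the cdef class RawRequestMessage;
    # the real type lives in aiohttp/_http_parser.pyx and is compiled by Cython.
    @dataclass
    class RawRequestMessageSketch:
        method: str
        path: str
        version: Any        # HttpVersion
        headers: Any        # CIMultiDict
        raw_headers: tuple
        should_close: bool
        compression: Any
        upgrade: bool
        chunked: bool
        url: Any            # yarl.URL

        def _replace(self, **dct):
            # Mirror of the quoted .pyx logic: copy self and overwrite only
            # the keys that were actually supplied.
            allowed = {f.name for f in fields(self)}
            changes = {k: v for k, v in dct.items() if k in allowed}
            return replace(self, **changes)

    # The dataclass constructor plays the role of the quoted
    # _new_request_message() factory, which simply assigns each field in turn.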
_new_request_message(str method, # <<<<<<<<<<<<<< @@ -7948,7 +7996,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject * return __pyx_r; } -/* "aiohttp/_http_parser.pyx":221 +/* "aiohttp/_http_parser.pyx":222 * cdef readonly object chunked * * def __init__(self, version, code, reason, headers, raw_headers, # <<<<<<<<<<<<<< @@ -8016,7 +8064,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: @@ -8024,9 +8072,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[1]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 1); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 1); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: @@ -8034,9 +8082,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[2]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 2); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 2); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: @@ -8044,9 +8092,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[3]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 3); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 3); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: @@ -8054,9 +8102,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[4]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 4); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 4); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 5: @@ -8064,9 +8112,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[5]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 5); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 5); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 6: @@ -8074,9 +8122,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[6]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else 
if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 6); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 6); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 7: @@ -8084,9 +8132,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[7]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 7); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 7); __PYX_ERR(0, 222, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 8: @@ -8094,14 +8142,14 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[8]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 8); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 8); __PYX_ERR(0, 222, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 221, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 222, __pyx_L3_error) } } else if (unlikely(__pyx_nargs != 9)) { goto __pyx_L5_argtuple_error; @@ -8128,7 +8176,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObj } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, __pyx_nargs); __PYX_ERR(0, 221, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, __pyx_nargs); __PYX_ERR(0, 222, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -8165,7 +8213,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__init__", 1); - /* "aiohttp/_http_parser.pyx":223 + /* "aiohttp/_http_parser.pyx":224 * def __init__(self, version, code, reason, headers, raw_headers, * should_close, compression, upgrade, chunked): * self.version = version # <<<<<<<<<<<<<< @@ -8178,24 +8226,24 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->version); __pyx_v_self->version = __pyx_v_version; - /* "aiohttp/_http_parser.pyx":224 + /* "aiohttp/_http_parser.pyx":225 * should_close, compression, upgrade, chunked): * self.version = version * self.code = code # <<<<<<<<<<<<<< * self.reason = reason * self.headers = headers */ - __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_code); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 224, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_code); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 225, __pyx_L1_error) __pyx_v_self->code = __pyx_t_1; - /* "aiohttp/_http_parser.pyx":225 + /* "aiohttp/_http_parser.pyx":226 * self.version = version * self.code = code * self.reason = reason # <<<<<<<<<<<<<< * self.headers = headers * self.raw_headers = raw_headers */ - if 
(!(likely(PyUnicode_CheckExact(__pyx_v_reason))||((__pyx_v_reason) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_reason))) __PYX_ERR(0, 225, __pyx_L1_error) + if (!(likely(PyUnicode_CheckExact(__pyx_v_reason))||((__pyx_v_reason) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_reason))) __PYX_ERR(0, 226, __pyx_L1_error) __pyx_t_2 = __pyx_v_reason; __Pyx_INCREF(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_2); @@ -8204,7 +8252,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __pyx_v_self->reason = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":226 + /* "aiohttp/_http_parser.pyx":227 * self.code = code * self.reason = reason * self.headers = headers # <<<<<<<<<<<<<< @@ -8217,7 +8265,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->headers); __pyx_v_self->headers = __pyx_v_headers; - /* "aiohttp/_http_parser.pyx":227 + /* "aiohttp/_http_parser.pyx":228 * self.reason = reason * self.headers = headers * self.raw_headers = raw_headers # <<<<<<<<<<<<<< @@ -8230,7 +8278,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->raw_headers); __pyx_v_self->raw_headers = __pyx_v_raw_headers; - /* "aiohttp/_http_parser.pyx":228 + /* "aiohttp/_http_parser.pyx":229 * self.headers = headers * self.raw_headers = raw_headers * self.should_close = should_close # <<<<<<<<<<<<<< @@ -8243,7 +8291,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->should_close); __pyx_v_self->should_close = __pyx_v_should_close; - /* "aiohttp/_http_parser.pyx":229 + /* "aiohttp/_http_parser.pyx":230 * self.raw_headers = raw_headers * self.should_close = should_close * self.compression = compression # <<<<<<<<<<<<<< @@ -8256,7 +8304,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->compression); __pyx_v_self->compression = __pyx_v_compression; - /* "aiohttp/_http_parser.pyx":230 + /* "aiohttp/_http_parser.pyx":231 * self.should_close = should_close * self.compression = compression * self.upgrade = upgrade # <<<<<<<<<<<<<< @@ -8269,7 +8317,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->upgrade); __pyx_v_self->upgrade = __pyx_v_upgrade; - /* "aiohttp/_http_parser.pyx":231 + /* "aiohttp/_http_parser.pyx":232 * self.compression = compression * self.upgrade = upgrade * self.chunked = chunked # <<<<<<<<<<<<<< @@ -8282,7 +8330,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __Pyx_DECREF(__pyx_v_self->chunked); __pyx_v_self->chunked = __pyx_v_chunked; - /* "aiohttp/_http_parser.pyx":221 + /* "aiohttp/_http_parser.pyx":222 * cdef readonly object chunked * * def __init__(self, version, code, reason, headers, raw_headers, # <<<<<<<<<<<<<< @@ -8302,7 +8350,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct return __pyx_r; } -/* "aiohttp/_http_parser.pyx":233 +/* "aiohttp/_http_parser.pyx":234 * self.chunked = chunked * * def __repr__(self): # <<<<<<<<<<<<<< @@ -8326,7 +8374,7 @@ static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__ } static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ -/* 
"aiohttp/_http_parser.pyx":244 +/* "aiohttp/_http_parser.pyx":245 * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< @@ -8346,7 +8394,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)Py_None); __Pyx_INCREF(Py_None); - __PYX_ERR(0, 244, __pyx_L1_error) + __PYX_ERR(0, 245, __pyx_L1_error) } else { __Pyx_GOTREF((PyObject *)__pyx_cur_scope); } @@ -8354,7 +8402,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ __Pyx_INCREF(__pyx_cur_scope->__pyx_genexpr_arg_0); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_genexpr_arg_0); { - __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; @@ -8393,24 +8441,24 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ return NULL; } __pyx_L3_first_run:; - if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 244, __pyx_L1_error) - __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 244, __pyx_L1_error) + if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 245, __pyx_L1_error) + __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_r); - if (unlikely(!__pyx_cur_scope->__pyx_genexpr_arg_0)) { __Pyx_RaiseUnboundLocalError(".0"); __PYX_ERR(0, 244, __pyx_L1_error) } + if (unlikely(!__pyx_cur_scope->__pyx_genexpr_arg_0)) { __Pyx_RaiseUnboundLocalError(".0"); __PYX_ERR(0, 245, __pyx_L1_error) } __pyx_t_1 = __pyx_cur_scope->__pyx_genexpr_arg_0; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; for (;;) { { Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_1); #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 244, __pyx_L1_error) + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 245, __pyx_L1_error) #endif if (__pyx_t_2 >= __pyx_temp) break; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely((0 < 0))) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely((0 < 0))) __PYX_ERR(0, 245, __pyx_L1_error) #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); #endif if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { @@ -8419,7 +8467,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ if (unlikely(size != 2)) { if (size 
> 2) __Pyx_RaiseTooManyValuesError(2); else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); - __PYX_ERR(0, 244, __pyx_L1_error) + __PYX_ERR(0, 245, __pyx_L1_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { @@ -8432,15 +8480,15 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(__pyx_t_5); #else - __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); #endif __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { Py_ssize_t index = -1; - __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_7 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_6); @@ -8448,7 +8496,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ __Pyx_GOTREF(__pyx_t_4); index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L6_unpacking_failed; __Pyx_GOTREF(__pyx_t_5); - if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 244, __pyx_L1_error) + if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 245, __pyx_L1_error) __pyx_t_7 = NULL; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L7_unpacking_done; @@ -8456,7 +8504,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_7 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); - __PYX_ERR(0, 244, __pyx_L1_error) + __PYX_ERR(0, 245, __pyx_L1_error) __pyx_L7_unpacking_done:; } __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_name); @@ -8467,15 +8515,15 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_val, __pyx_t_5); __Pyx_GIVEREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 244, __pyx_L1_error) + if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 
0; } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; @@ -8503,7 +8551,7 @@ static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":233 +/* "aiohttp/_http_parser.pyx":234 * self.chunked = chunked * * def __repr__(self): # <<<<<<<<<<<<<< @@ -8525,201 +8573,201 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__ int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__repr__", 1); - /* "aiohttp/_http_parser.pyx":234 + /* "aiohttp/_http_parser.pyx":235 * * def __repr__(self): * info = [] # <<<<<<<<<<<<<< * info.append(("version", self.version)) * info.append(("code", self.code)) */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_info = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":235 + /* "aiohttp/_http_parser.pyx":236 * def __repr__(self): * info = [] * info.append(("version", self.version)) # <<<<<<<<<<<<<< * info.append(("code", self.code)) * info.append(("reason", self.reason)) */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 236, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_u_version); __Pyx_GIVEREF(__pyx_n_u_version); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version)) __PYX_ERR(0, 235, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version)) __PYX_ERR(0, 236, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->version); __Pyx_GIVEREF(__pyx_v_self->version); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version)) __PYX_ERR(0, 235, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 235, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version)) __PYX_ERR(0, 236, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 236, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":236 + /* "aiohttp/_http_parser.pyx":237 * info = [] * info.append(("version", self.version)) * info.append(("code", self.code)) # <<<<<<<<<<<<<< * info.append(("reason", self.reason)) * info.append(("headers", self.headers)) */ - __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 236, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 237, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 236, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 237, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_code); __Pyx_GIVEREF(__pyx_n_u_code); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_code)) __PYX_ERR(0, 236, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_code)) __PYX_ERR(0, 237, __pyx_L1_error); __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(0, 236, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(0, 237, __pyx_L1_error); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 236, __pyx_L1_error) 
+ __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 237, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":237 + /* "aiohttp/_http_parser.pyx":238 * info.append(("version", self.version)) * info.append(("code", self.code)) * info.append(("reason", self.reason)) # <<<<<<<<<<<<<< * info.append(("headers", self.headers)) * info.append(("raw_headers", self.raw_headers)) */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 237, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 238, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_reason); __Pyx_GIVEREF(__pyx_n_u_reason); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_reason)) __PYX_ERR(0, 237, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_reason)) __PYX_ERR(0, 238, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->reason); __Pyx_GIVEREF(__pyx_v_self->reason); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->reason)) __PYX_ERR(0, 237, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 237, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->reason)) __PYX_ERR(0, 238, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 238, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":238 + /* "aiohttp/_http_parser.pyx":239 * info.append(("code", self.code)) * info.append(("reason", self.reason)) * info.append(("headers", self.headers)) # <<<<<<<<<<<<<< * info.append(("raw_headers", self.raw_headers)) * info.append(("should_close", self.should_close)) */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 238, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 239, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_headers); __Pyx_GIVEREF(__pyx_n_u_headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_headers)) __PYX_ERR(0, 238, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_headers)) __PYX_ERR(0, 239, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->headers); __Pyx_GIVEREF(__pyx_v_self->headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->headers)) __PYX_ERR(0, 238, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 238, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->headers)) __PYX_ERR(0, 239, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 239, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":239 + /* "aiohttp/_http_parser.pyx":240 * info.append(("reason", self.reason)) * info.append(("headers", self.headers)) * info.append(("raw_headers", self.raw_headers)) # <<<<<<<<<<<<<< * info.append(("should_close", self.should_close)) * info.append(("compression", self.compression)) */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 239, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_raw_headers); __Pyx_GIVEREF(__pyx_n_u_raw_headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_raw_headers)) __PYX_ERR(0, 
239, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_raw_headers)) __PYX_ERR(0, 240, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->raw_headers); __Pyx_GIVEREF(__pyx_v_self->raw_headers); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->raw_headers)) __PYX_ERR(0, 239, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 239, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->raw_headers)) __PYX_ERR(0, 240, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 240, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":240 + /* "aiohttp/_http_parser.pyx":241 * info.append(("headers", self.headers)) * info.append(("raw_headers", self.raw_headers)) * info.append(("should_close", self.should_close)) # <<<<<<<<<<<<<< * info.append(("compression", self.compression)) * info.append(("upgrade", self.upgrade)) */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_should_close); __Pyx_GIVEREF(__pyx_n_u_should_close); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_should_close)) __PYX_ERR(0, 240, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_should_close)) __PYX_ERR(0, 241, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->should_close); __Pyx_GIVEREF(__pyx_v_self->should_close); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->should_close)) __PYX_ERR(0, 240, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 240, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->should_close)) __PYX_ERR(0, 241, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 241, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":241 + /* "aiohttp/_http_parser.pyx":242 * info.append(("raw_headers", self.raw_headers)) * info.append(("should_close", self.should_close)) * info.append(("compression", self.compression)) # <<<<<<<<<<<<<< * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_compression); __Pyx_GIVEREF(__pyx_n_u_compression); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_compression)) __PYX_ERR(0, 241, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_compression)) __PYX_ERR(0, 242, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->compression); __Pyx_GIVEREF(__pyx_v_self->compression); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->compression)) __PYX_ERR(0, 241, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 241, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->compression)) __PYX_ERR(0, 242, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* 
"aiohttp/_http_parser.pyx":242 + /* "aiohttp/_http_parser.pyx":243 * info.append(("should_close", self.should_close)) * info.append(("compression", self.compression)) * info.append(("upgrade", self.upgrade)) # <<<<<<<<<<<<<< * info.append(("chunked", self.chunked)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 242, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 243, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_upgrade); __Pyx_GIVEREF(__pyx_n_u_upgrade); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_upgrade)) __PYX_ERR(0, 242, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_upgrade)) __PYX_ERR(0, 243, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->upgrade); __Pyx_GIVEREF(__pyx_v_self->upgrade); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->upgrade)) __PYX_ERR(0, 242, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 242, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->upgrade)) __PYX_ERR(0, 243, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 243, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":243 + /* "aiohttp/_http_parser.pyx":244 * info.append(("compression", self.compression)) * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) # <<<<<<<<<<<<<< * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) * return '' */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 243, __pyx_L1_error) + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_n_u_chunked); __Pyx_GIVEREF(__pyx_n_u_chunked); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_chunked)) __PYX_ERR(0, 243, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_chunked)) __PYX_ERR(0, 244, __pyx_L1_error); __Pyx_INCREF(__pyx_v_self->chunked); __Pyx_GIVEREF(__pyx_v_self->chunked); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->chunked)) __PYX_ERR(0, 243, __pyx_L1_error); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 243, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->chunked)) __PYX_ERR(0, 244, __pyx_L1_error); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 244, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":244 + /* "aiohttp/_http_parser.pyx":245 * info.append(("upgrade", self.upgrade)) * info.append(("chunked", self.chunked)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< * return '' * */ - __pyx_t_3 = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___genexpr(NULL, __pyx_v_info); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_3 = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___genexpr(NULL, __pyx_v_info); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_Generator_Next(__pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_1 = __Pyx_Generator_Next(__pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 245, 
__pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_t_3 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_sinfo = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":245 + /* "aiohttp/_http_parser.pyx":246 * info.append(("chunked", self.chunked)) * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) * return '' # <<<<<<<<<<<<<< @@ -8727,16 +8775,16 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__ * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawResponseMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 245, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawResponseMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 246, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_3, __pyx_kp_u__3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 245, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_3, __pyx_kp_u__3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":233 + /* "aiohttp/_http_parser.pyx":234 * self.chunked = chunked * * def __repr__(self): # <<<<<<<<<<<<<< @@ -8759,7 +8807,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":211 +/* "aiohttp/_http_parser.pyx":212 * @cython.freelist(DEFAULT_FREELIST_SIZE) * cdef class RawResponseMessage: * cdef readonly object version # HttpVersion # <<<<<<<<<<<<<< @@ -8798,7 +8846,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7version_ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":212 +/* "aiohttp/_http_parser.pyx":213 * cdef class RawResponseMessage: * cdef readonly object version # HttpVersion * cdef readonly int code # <<<<<<<<<<<<<< @@ -8830,7 +8878,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4code___g int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__get__", 1); __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 212, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 213, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; @@ -8847,7 +8895,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4code___g return __pyx_r; } -/* "aiohttp/_http_parser.pyx":213 +/* "aiohttp/_http_parser.pyx":214 * cdef readonly object version # HttpVersion * cdef readonly int code * cdef readonly str reason # <<<<<<<<<<<<<< @@ -8886,7 +8934,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6reason__ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":214 +/* "aiohttp/_http_parser.pyx":215 * cdef readonly int code * cdef readonly str reason * cdef readonly object headers # CIMultiDict # <<<<<<<<<<<<<< @@ -8925,7 +8973,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7headers_ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":215 +/* "aiohttp/_http_parser.pyx":216 * cdef readonly 
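
The hunks above quote the full ``RawResponseMessage.__init__`` and ``__repr__`` bodies from the ``.pyx`` (again, only the line annotations move). A minimal pure-Python sketch of that behaviour follows; the class name is a hypothetical stand-in and the repr marker text is approximated from the concatenated string constants in the generated C::

    class RawResponseMessageSketch:
        # Hypothetical stand-in for the cdef class RawResponseMessage in
        # aiohttp/_http_parser.pyx; attribute order mirrors the quoted source.
        def __init__(self, version, code, reason, headers, raw_headers,
                     should_close, compression, upgrade, chunked):
            self.version = version
            self.code = code
            self.reason = reason
            self.headers = headers
            self.raw_headers = raw_headers
            self.should_close = should_close
            self.compression = compression
            self.upgrade = upgrade
            self.chunked = chunked

        def __repr__(self):
            # Same shape as the quoted __repr__: collect (name, value) pairs,
            # join them as "name=repr(value)", then wrap in the class marker.
            info = [
                ("version", self.version),
                ("code", self.code),
                ("reason", self.reason),
                ("headers", self.headers),
                ("raw_headers", self.raw_headers),
                ("should_close", self.should_close),
                ("compression", self.compression),
                ("upgrade", self.upgrade),
                ("chunked", self.chunked),
            ]
            sinfo = ", ".join(name + "=" + repr(val) for name, val in info)
            # Marker text approximated; the C code concatenates two string
            # constants around sinfo.
            return "<RawResponseMessage(" + sinfo + ")>"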
str reason * cdef readonly object headers # CIMultiDict * cdef readonly object raw_headers # tuple # <<<<<<<<<<<<<< @@ -8964,7 +9012,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11raw_hea return __pyx_r; } -/* "aiohttp/_http_parser.pyx":216 +/* "aiohttp/_http_parser.pyx":217 * cdef readonly object headers # CIMultiDict * cdef readonly object raw_headers # tuple * cdef readonly object should_close # <<<<<<<<<<<<<< @@ -9003,7 +9051,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_12should_ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":217 +/* "aiohttp/_http_parser.pyx":218 * cdef readonly object raw_headers # tuple * cdef readonly object should_close * cdef readonly object compression # <<<<<<<<<<<<<< @@ -9042,7 +9090,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11compres return __pyx_r; } -/* "aiohttp/_http_parser.pyx":218 +/* "aiohttp/_http_parser.pyx":219 * cdef readonly object should_close * cdef readonly object compression * cdef readonly object upgrade # <<<<<<<<<<<<<< @@ -9081,7 +9129,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":219 +/* "aiohttp/_http_parser.pyx":220 * cdef readonly object compression * cdef readonly object upgrade * cdef readonly object chunked # <<<<<<<<<<<<<< @@ -9592,7 +9640,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6__setsta return __pyx_r; } -/* "aiohttp/_http_parser.pyx":248 +/* "aiohttp/_http_parser.pyx":249 * * * cdef _new_response_message(object version, # <<<<<<<<<<<<<< @@ -9610,19 +9658,19 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_new_response_message", 1); - /* "aiohttp/_http_parser.pyx":258 + /* "aiohttp/_http_parser.pyx":259 * bint chunked): * cdef RawResponseMessage ret * ret = RawResponseMessage.__new__(RawResponseMessage) # <<<<<<<<<<<<<< * ret.version = version * ret.code = code */ - __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 258, __pyx_L1_error) + __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 259, __pyx_L1_error) __Pyx_GOTREF((PyObject *)__pyx_t_1); __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":259 + /* "aiohttp/_http_parser.pyx":260 * cdef RawResponseMessage ret * ret = RawResponseMessage.__new__(RawResponseMessage) * ret.version = version # <<<<<<<<<<<<<< @@ -9635,7 +9683,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __Pyx_DECREF(__pyx_v_ret->version); __pyx_v_ret->version = __pyx_v_version; - /* "aiohttp/_http_parser.pyx":260 + /* "aiohttp/_http_parser.pyx":261 * ret = RawResponseMessage.__new__(RawResponseMessage) * ret.version = version * ret.code = code # <<<<<<<<<<<<<< @@ -9644,7 +9692,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject */ __pyx_v_ret->code = __pyx_v_code; - /* "aiohttp/_http_parser.pyx":261 + /* "aiohttp/_http_parser.pyx":262 * ret.version = version * ret.code = code * ret.reason = reason # <<<<<<<<<<<<<< @@ 
-9657,7 +9705,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __Pyx_DECREF(__pyx_v_ret->reason); __pyx_v_ret->reason = __pyx_v_reason; - /* "aiohttp/_http_parser.pyx":262 + /* "aiohttp/_http_parser.pyx":263 * ret.code = code * ret.reason = reason * ret.headers = headers # <<<<<<<<<<<<<< @@ -9670,7 +9718,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __Pyx_DECREF(__pyx_v_ret->headers); __pyx_v_ret->headers = __pyx_v_headers; - /* "aiohttp/_http_parser.pyx":263 + /* "aiohttp/_http_parser.pyx":264 * ret.reason = reason * ret.headers = headers * ret.raw_headers = raw_headers # <<<<<<<<<<<<<< @@ -9683,14 +9731,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __Pyx_DECREF(__pyx_v_ret->raw_headers); __pyx_v_ret->raw_headers = __pyx_v_raw_headers; - /* "aiohttp/_http_parser.pyx":264 + /* "aiohttp/_http_parser.pyx":265 * ret.headers = headers * ret.raw_headers = raw_headers * ret.should_close = should_close # <<<<<<<<<<<<<< * ret.compression = compression * ret.upgrade = upgrade */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 264, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 265, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_ret->should_close); @@ -9698,7 +9746,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __pyx_v_ret->should_close = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":265 + /* "aiohttp/_http_parser.pyx":266 * ret.raw_headers = raw_headers * ret.should_close = should_close * ret.compression = compression # <<<<<<<<<<<<<< @@ -9711,14 +9759,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __Pyx_DECREF(__pyx_v_ret->compression); __pyx_v_ret->compression = __pyx_v_compression; - /* "aiohttp/_http_parser.pyx":266 + /* "aiohttp/_http_parser.pyx":267 * ret.should_close = should_close * ret.compression = compression * ret.upgrade = upgrade # <<<<<<<<<<<<<< * ret.chunked = chunked * return ret */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 266, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 267, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_ret->upgrade); @@ -9726,14 +9774,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __pyx_v_ret->upgrade = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":267 + /* "aiohttp/_http_parser.pyx":268 * ret.compression = compression * ret.upgrade = upgrade * ret.chunked = chunked # <<<<<<<<<<<<<< * return ret * */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 267, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_ret->chunked); @@ -9741,7 +9789,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __pyx_v_ret->chunked = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":268 + /* "aiohttp/_http_parser.pyx":269 * ret.upgrade = upgrade * ret.chunked = chunked * return ret # <<<<<<<<<<<<<< @@ -9753,7 +9801,7 @@ static PyObject 
*__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject __pyx_r = ((PyObject *)__pyx_v_ret); goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":248 + /* "aiohttp/_http_parser.pyx":249 * * * cdef _new_response_message(object version, # <<<<<<<<<<<<<< @@ -9773,7 +9821,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject return __pyx_r; } -/* "aiohttp/_http_parser.pyx":312 +/* "aiohttp/_http_parser.pyx":313 * Py_buffer py_buf * * def __cinit__(self): # <<<<<<<<<<<<<< @@ -9812,7 +9860,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ const char *__pyx_filename = NULL; int __pyx_clineno = 0; - /* "aiohttp/_http_parser.pyx":313 + /* "aiohttp/_http_parser.pyx":314 * * def __cinit__(self): * self._cparser = \ # <<<<<<<<<<<<<< @@ -9821,7 +9869,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ */ __pyx_v_self->_cparser = ((llhttp_t *)PyMem_Malloc((sizeof(llhttp_t)))); - /* "aiohttp/_http_parser.pyx":315 + /* "aiohttp/_http_parser.pyx":316 * self._cparser = \ * PyMem_Malloc(sizeof(cparser.llhttp_t)) * if self._cparser is NULL: # <<<<<<<<<<<<<< @@ -9831,16 +9879,16 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ __pyx_t_1 = (__pyx_v_self->_cparser == NULL); if (unlikely(__pyx_t_1)) { - /* "aiohttp/_http_parser.pyx":316 + /* "aiohttp/_http_parser.pyx":317 * PyMem_Malloc(sizeof(cparser.llhttp_t)) * if self._cparser is NULL: * raise MemoryError() # <<<<<<<<<<<<<< * * self._csettings = \ */ - PyErr_NoMemory(); __PYX_ERR(0, 316, __pyx_L1_error) + PyErr_NoMemory(); __PYX_ERR(0, 317, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":315 + /* "aiohttp/_http_parser.pyx":316 * self._cparser = \ * PyMem_Malloc(sizeof(cparser.llhttp_t)) * if self._cparser is NULL: # <<<<<<<<<<<<<< @@ -9849,7 +9897,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ */ } - /* "aiohttp/_http_parser.pyx":318 + /* "aiohttp/_http_parser.pyx":319 * raise MemoryError() * * self._csettings = \ # <<<<<<<<<<<<<< @@ -9858,7 +9906,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ */ __pyx_v_self->_csettings = ((llhttp_settings_t *)PyMem_Malloc((sizeof(llhttp_settings_t)))); - /* "aiohttp/_http_parser.pyx":320 + /* "aiohttp/_http_parser.pyx":321 * self._csettings = \ * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) * if self._csettings is NULL: # <<<<<<<<<<<<<< @@ -9868,16 +9916,16 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ __pyx_t_1 = (__pyx_v_self->_csettings == NULL); if (unlikely(__pyx_t_1)) { - /* "aiohttp/_http_parser.pyx":321 + /* "aiohttp/_http_parser.pyx":322 * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) * if self._csettings is NULL: * raise MemoryError() # <<<<<<<<<<<<<< * * def __dealloc__(self): */ - PyErr_NoMemory(); __PYX_ERR(0, 321, __pyx_L1_error) + PyErr_NoMemory(); __PYX_ERR(0, 322, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":320 + /* "aiohttp/_http_parser.pyx":321 * self._csettings = \ * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) * if self._csettings is NULL: # <<<<<<<<<<<<<< @@ -9886,7 +9934,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ */ } - /* "aiohttp/_http_parser.pyx":312 + /* "aiohttp/_http_parser.pyx":313 * Py_buffer py_buf * * def __cinit__(self): # <<<<<<<<<<<<<< @@ -9904,7 +9952,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_ return __pyx_r; } -/* 
"aiohttp/_http_parser.pyx":323 +/* "aiohttp/_http_parser.pyx":324 * raise MemoryError() * * def __dealloc__(self): # <<<<<<<<<<<<<< @@ -9927,7 +9975,7 @@ static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - /* "aiohttp/_http_parser.pyx":324 + /* "aiohttp/_http_parser.pyx":325 * * def __dealloc__(self): * PyMem_Free(self._cparser) # <<<<<<<<<<<<<< @@ -9936,7 +9984,7 @@ static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __ */ PyMem_Free(__pyx_v_self->_cparser); - /* "aiohttp/_http_parser.pyx":325 + /* "aiohttp/_http_parser.pyx":326 * def __dealloc__(self): * PyMem_Free(self._cparser) * PyMem_Free(self._csettings) # <<<<<<<<<<<<<< @@ -9945,7 +9993,7 @@ static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __ */ PyMem_Free(__pyx_v_self->_csettings); - /* "aiohttp/_http_parser.pyx":323 + /* "aiohttp/_http_parser.pyx":324 * raise MemoryError() * * def __dealloc__(self): # <<<<<<<<<<<<<< @@ -9956,7 +10004,7 @@ static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __ /* function exit code */ } -/* "aiohttp/_http_parser.pyx":327 +/* "aiohttp/_http_parser.pyx":328 * PyMem_Free(self._csettings) * * cdef _init( # <<<<<<<<<<<<<< @@ -9966,7 +10014,7 @@ static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum llhttp_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, int __pyx_v_limit, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args) { - /* "aiohttp/_http_parser.pyx":330 + /* "aiohttp/_http_parser.pyx":331 * self, cparser.llhttp_type mode, * object protocol, object loop, int limit, * object timer=None, # <<<<<<<<<<<<<< @@ -9978,7 +10026,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx size_t __pyx_v_max_headers = ((size_t)0x8000); size_t __pyx_v_max_field_size = ((size_t)0x1FFE); - /* "aiohttp/_http_parser.pyx":332 + /* "aiohttp/_http_parser.pyx":333 * object timer=None, * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< @@ -9987,7 +10035,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ PyObject *__pyx_v_payload_exception = ((PyObject *)Py_None); - /* "aiohttp/_http_parser.pyx":333 + /* "aiohttp/_http_parser.pyx":334 * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< @@ -9997,7 +10045,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx int __pyx_v_response_with_body = ((int)1); int __pyx_v_read_until_eof = ((int)0); - /* "aiohttp/_http_parser.pyx":334 + /* "aiohttp/_http_parser.pyx":335 * size_t max_field_size=8190, payload_exception=None, * bint response_with_body=True, bint read_until_eof=False, * bint auto_decompress=True, # <<<<<<<<<<<<<< @@ -10039,7 +10087,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx } } - /* "aiohttp/_http_parser.pyx":336 + /* "aiohttp/_http_parser.pyx":337 * bint auto_decompress=True, * ): * cparser.llhttp_settings_init(self._csettings) # <<<<<<<<<<<<<< @@ -10048,7 
+10096,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ llhttp_settings_init(__pyx_v_self->_csettings); - /* "aiohttp/_http_parser.pyx":337 + /* "aiohttp/_http_parser.pyx":338 * ): * cparser.llhttp_settings_init(self._csettings) * cparser.llhttp_init(self._cparser, mode, self._csettings) # <<<<<<<<<<<<<< @@ -10057,7 +10105,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ llhttp_init(__pyx_v_self->_cparser, __pyx_v_mode, __pyx_v_self->_csettings); - /* "aiohttp/_http_parser.pyx":338 + /* "aiohttp/_http_parser.pyx":339 * cparser.llhttp_settings_init(self._csettings) * cparser.llhttp_init(self._cparser, mode, self._csettings) * self._cparser.data = self # <<<<<<<<<<<<<< @@ -10066,7 +10114,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_cparser->data = ((void *)__pyx_v_self); - /* "aiohttp/_http_parser.pyx":339 + /* "aiohttp/_http_parser.pyx":340 * cparser.llhttp_init(self._cparser, mode, self._csettings) * self._cparser.data = self * self._cparser.content_length = 0 # <<<<<<<<<<<<<< @@ -10075,7 +10123,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_cparser->content_length = 0; - /* "aiohttp/_http_parser.pyx":341 + /* "aiohttp/_http_parser.pyx":342 * self._cparser.content_length = 0 * * self._protocol = protocol # <<<<<<<<<<<<<< @@ -10088,7 +10136,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_protocol); __pyx_v_self->_protocol = __pyx_v_protocol; - /* "aiohttp/_http_parser.pyx":342 + /* "aiohttp/_http_parser.pyx":343 * * self._protocol = protocol * self._loop = loop # <<<<<<<<<<<<<< @@ -10101,7 +10149,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_loop); __pyx_v_self->_loop = __pyx_v_loop; - /* "aiohttp/_http_parser.pyx":343 + /* "aiohttp/_http_parser.pyx":344 * self._protocol = protocol * self._loop = loop * self._timer = timer # <<<<<<<<<<<<<< @@ -10114,14 +10162,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_timer); __pyx_v_self->_timer = __pyx_v_timer; - /* "aiohttp/_http_parser.pyx":345 + /* "aiohttp/_http_parser.pyx":346 * self._timer = timer * * self._buf = bytearray() # <<<<<<<<<<<<<< * self._payload = None * self._payload_error = 0 */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 345, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 346, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->_buf); @@ -10129,7 +10177,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __pyx_v_self->_buf = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":346 + /* "aiohttp/_http_parser.pyx":347 * * self._buf = bytearray() * self._payload = None # <<<<<<<<<<<<<< @@ -10142,7 +10190,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = Py_None; - /* "aiohttp/_http_parser.pyx":347 + /* "aiohttp/_http_parser.pyx":348 * self._buf = bytearray() * self._payload = None * self._payload_error = 0 # <<<<<<<<<<<<<< @@ -10151,7 +10199,7 @@ static PyObject 
*__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_payload_error = 0; - /* "aiohttp/_http_parser.pyx":348 + /* "aiohttp/_http_parser.pyx":349 * self._payload = None * self._payload_error = 0 * self._payload_exception = payload_exception # <<<<<<<<<<<<<< @@ -10164,14 +10212,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_payload_exception); __pyx_v_self->_payload_exception = __pyx_v_payload_exception; - /* "aiohttp/_http_parser.pyx":349 + /* "aiohttp/_http_parser.pyx":350 * self._payload_error = 0 * self._payload_exception = payload_exception * self._messages = [] # <<<<<<<<<<<<<< * * self._raw_name = bytearray() */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 349, __pyx_L1_error) + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 350, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->_messages); @@ -10179,14 +10227,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __pyx_v_self->_messages = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":351 + /* "aiohttp/_http_parser.pyx":352 * self._messages = [] * * self._raw_name = bytearray() # <<<<<<<<<<<<<< * self._raw_value = bytearray() * self._has_value = False */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 351, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 352, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->_raw_name); @@ -10194,14 +10242,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __pyx_v_self->_raw_name = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":352 + /* "aiohttp/_http_parser.pyx":353 * * self._raw_name = bytearray() * self._raw_value = bytearray() # <<<<<<<<<<<<<< * self._has_value = False * */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 352, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 353, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->_raw_value); @@ -10209,7 +10257,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __pyx_v_self->_raw_value = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":353 + /* "aiohttp/_http_parser.pyx":354 * self._raw_name = bytearray() * self._raw_value = bytearray() * self._has_value = False # <<<<<<<<<<<<<< @@ -10218,7 +10266,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_has_value = 0; - /* "aiohttp/_http_parser.pyx":355 + /* "aiohttp/_http_parser.pyx":356 * self._has_value = False * * self._max_line_size = max_line_size # <<<<<<<<<<<<<< @@ -10227,7 +10275,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_max_line_size = __pyx_v_max_line_size; - /* "aiohttp/_http_parser.pyx":356 + /* "aiohttp/_http_parser.pyx":357 * * self._max_line_size = max_line_size * self._max_headers = max_headers # <<<<<<<<<<<<<< @@ -10236,7 +10284,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_max_headers = 
__pyx_v_max_headers; - /* "aiohttp/_http_parser.pyx":357 + /* "aiohttp/_http_parser.pyx":358 * self._max_line_size = max_line_size * self._max_headers = max_headers * self._max_field_size = max_field_size # <<<<<<<<<<<<<< @@ -10245,7 +10293,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_max_field_size = __pyx_v_max_field_size; - /* "aiohttp/_http_parser.pyx":358 + /* "aiohttp/_http_parser.pyx":359 * self._max_headers = max_headers * self._max_field_size = max_field_size * self._response_with_body = response_with_body # <<<<<<<<<<<<<< @@ -10254,7 +10302,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_response_with_body = __pyx_v_response_with_body; - /* "aiohttp/_http_parser.pyx":359 + /* "aiohttp/_http_parser.pyx":360 * self._max_field_size = max_field_size * self._response_with_body = response_with_body * self._read_until_eof = read_until_eof # <<<<<<<<<<<<<< @@ -10263,7 +10311,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_read_until_eof = __pyx_v_read_until_eof; - /* "aiohttp/_http_parser.pyx":360 + /* "aiohttp/_http_parser.pyx":361 * self._response_with_body = response_with_body * self._read_until_eof = read_until_eof * self._upgraded = False # <<<<<<<<<<<<<< @@ -10272,7 +10320,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_upgraded = 0; - /* "aiohttp/_http_parser.pyx":361 + /* "aiohttp/_http_parser.pyx":362 * self._read_until_eof = read_until_eof * self._upgraded = False * self._auto_decompress = auto_decompress # <<<<<<<<<<<<<< @@ -10281,7 +10329,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_auto_decompress = __pyx_v_auto_decompress; - /* "aiohttp/_http_parser.pyx":362 + /* "aiohttp/_http_parser.pyx":363 * self._upgraded = False * self._auto_decompress = auto_decompress * self._content_encoding = None # <<<<<<<<<<<<<< @@ -10294,7 +10342,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_content_encoding); __pyx_v_self->_content_encoding = ((PyObject*)Py_None); - /* "aiohttp/_http_parser.pyx":364 + /* "aiohttp/_http_parser.pyx":365 * self._content_encoding = None * * self._csettings.on_url = cb_on_url # <<<<<<<<<<<<<< @@ -10303,7 +10351,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_url = __pyx_f_7aiohttp_12_http_parser_cb_on_url; - /* "aiohttp/_http_parser.pyx":365 + /* "aiohttp/_http_parser.pyx":366 * * self._csettings.on_url = cb_on_url * self._csettings.on_status = cb_on_status # <<<<<<<<<<<<<< @@ -10312,7 +10360,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_status = __pyx_f_7aiohttp_12_http_parser_cb_on_status; - /* "aiohttp/_http_parser.pyx":366 + /* "aiohttp/_http_parser.pyx":367 * self._csettings.on_url = cb_on_url * self._csettings.on_status = cb_on_status * self._csettings.on_header_field = cb_on_header_field # <<<<<<<<<<<<<< @@ -10321,7 +10369,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_header_field = __pyx_f_7aiohttp_12_http_parser_cb_on_header_field; - /* "aiohttp/_http_parser.pyx":367 + /* "aiohttp/_http_parser.pyx":368 * self._csettings.on_status = cb_on_status * self._csettings.on_header_field = cb_on_header_field 
* self._csettings.on_header_value = cb_on_header_value # <<<<<<<<<<<<<< @@ -10330,7 +10378,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_header_value = __pyx_f_7aiohttp_12_http_parser_cb_on_header_value; - /* "aiohttp/_http_parser.pyx":368 + /* "aiohttp/_http_parser.pyx":369 * self._csettings.on_header_field = cb_on_header_field * self._csettings.on_header_value = cb_on_header_value * self._csettings.on_headers_complete = cb_on_headers_complete # <<<<<<<<<<<<<< @@ -10339,7 +10387,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_headers_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete; - /* "aiohttp/_http_parser.pyx":369 + /* "aiohttp/_http_parser.pyx":370 * self._csettings.on_header_value = cb_on_header_value * self._csettings.on_headers_complete = cb_on_headers_complete * self._csettings.on_body = cb_on_body # <<<<<<<<<<<<<< @@ -10348,7 +10396,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_body = __pyx_f_7aiohttp_12_http_parser_cb_on_body; - /* "aiohttp/_http_parser.pyx":370 + /* "aiohttp/_http_parser.pyx":371 * self._csettings.on_headers_complete = cb_on_headers_complete * self._csettings.on_body = cb_on_body * self._csettings.on_message_begin = cb_on_message_begin # <<<<<<<<<<<<<< @@ -10357,7 +10405,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_message_begin = __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin; - /* "aiohttp/_http_parser.pyx":371 + /* "aiohttp/_http_parser.pyx":372 * self._csettings.on_body = cb_on_body * self._csettings.on_message_begin = cb_on_message_begin * self._csettings.on_message_complete = cb_on_message_complete # <<<<<<<<<<<<<< @@ -10366,7 +10414,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_message_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete; - /* "aiohttp/_http_parser.pyx":372 + /* "aiohttp/_http_parser.pyx":373 * self._csettings.on_message_begin = cb_on_message_begin * self._csettings.on_message_complete = cb_on_message_complete * self._csettings.on_chunk_header = cb_on_chunk_header # <<<<<<<<<<<<<< @@ -10375,7 +10423,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_chunk_header = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header; - /* "aiohttp/_http_parser.pyx":373 + /* "aiohttp/_http_parser.pyx":374 * self._csettings.on_message_complete = cb_on_message_complete * self._csettings.on_chunk_header = cb_on_chunk_header * self._csettings.on_chunk_complete = cb_on_chunk_complete # <<<<<<<<<<<<<< @@ -10384,7 +10432,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_csettings->on_chunk_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete; - /* "aiohttp/_http_parser.pyx":375 + /* "aiohttp/_http_parser.pyx":376 * self._csettings.on_chunk_complete = cb_on_chunk_complete * * self._last_error = None # <<<<<<<<<<<<<< @@ -10397,7 +10445,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx __Pyx_DECREF(__pyx_v_self->_last_error); __pyx_v_self->_last_error = Py_None; - /* "aiohttp/_http_parser.pyx":376 + /* "aiohttp/_http_parser.pyx":377 * * self._last_error = None * self._limit = limit # <<<<<<<<<<<<<< @@ -10406,7 
+10454,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx */ __pyx_v_self->_limit = __pyx_v_limit; - /* "aiohttp/_http_parser.pyx":327 + /* "aiohttp/_http_parser.pyx":328 * PyMem_Free(self._csettings) * * cdef _init( # <<<<<<<<<<<<<< @@ -10427,7 +10475,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx return __pyx_r; } -/* "aiohttp/_http_parser.pyx":378 +/* "aiohttp/_http_parser.pyx":379 * self._limit = limit * * cdef _process_header(self): # <<<<<<<<<<<<<< @@ -10453,7 +10501,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_process_header", 1); - /* "aiohttp/_http_parser.pyx":379 + /* "aiohttp/_http_parser.pyx":380 * * cdef _process_header(self): * if self._raw_name: # <<<<<<<<<<<<<< @@ -10463,62 +10511,62 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st __pyx_t_1 = (__pyx_v_self->_raw_name != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->_raw_name) != 0); if (__pyx_t_1) { - /* "aiohttp/_http_parser.pyx":380 + /* "aiohttp/_http_parser.pyx":381 * cdef _process_header(self): * if self._raw_name: * raw_name = bytes(self._raw_name) # <<<<<<<<<<<<<< * raw_value = bytes(self._raw_value) * */ - __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 380, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_raw_name = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":381 + /* "aiohttp/_http_parser.pyx":382 * if self._raw_name: * raw_name = bytes(self._raw_name) * raw_value = bytes(self._raw_value) # <<<<<<<<<<<<<< * * name = find_header(raw_name) */ - __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 381, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 382, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_raw_value = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":383 + /* "aiohttp/_http_parser.pyx":384 * raw_value = bytes(self._raw_value) * * name = find_header(raw_name) # <<<<<<<<<<<<<< * value = raw_value.decode('utf-8', 'surrogateescape') * */ - __pyx_t_2 = __pyx_f_7aiohttp_12_http_parser_find_header(__pyx_v_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 383, __pyx_L1_error) + __pyx_t_2 = __pyx_f_7aiohttp_12_http_parser_find_header(__pyx_v_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 384, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_name = __pyx_t_2; __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":384 + /* "aiohttp/_http_parser.pyx":385 * * name = find_header(raw_name) * value = raw_value.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * * self._headers.add(name, value) */ - __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_raw_value, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 384, __pyx_L1_error) + __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_raw_value, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 385, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_value = __pyx_t_2; __pyx_t_2 = 0; - /* 
"aiohttp/_http_parser.pyx":386 + /* "aiohttp/_http_parser.pyx":387 * value = raw_value.decode('utf-8', 'surrogateescape') * * self._headers.add(name, value) # <<<<<<<<<<<<<< * * if name is CONTENT_ENCODING: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_headers, __pyx_n_s_add); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 386, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_headers, __pyx_n_s_add); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 387, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; @@ -10538,13 +10586,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st PyObject *__pyx_callargs[3] = {__pyx_t_4, __pyx_v_name, __pyx_v_value}; __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 2+__pyx_t_5); __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 386, __pyx_L1_error) + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 387, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":388 + /* "aiohttp/_http_parser.pyx":389 * self._headers.add(name, value) * * if name is CONTENT_ENCODING: # <<<<<<<<<<<<<< @@ -10554,14 +10602,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st __pyx_t_1 = (__pyx_v_name == __pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING); if (__pyx_t_1) { - /* "aiohttp/_http_parser.pyx":389 + /* "aiohttp/_http_parser.pyx":390 * * if name is CONTENT_ENCODING: * self._content_encoding = value # <<<<<<<<<<<<<< * * PyByteArray_Resize(self._raw_name, 0) */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_value))) __PYX_ERR(0, 389, __pyx_L1_error) + if (!(likely(PyUnicode_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_v_value))) __PYX_ERR(0, 390, __pyx_L1_error) __pyx_t_2 = __pyx_v_value; __Pyx_INCREF(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_2); @@ -10570,7 +10618,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st __pyx_v_self->_content_encoding = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":388 + /* "aiohttp/_http_parser.pyx":389 * self._headers.add(name, value) * * if name is CONTENT_ENCODING: # <<<<<<<<<<<<<< @@ -10579,7 +10627,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st */ } - /* "aiohttp/_http_parser.pyx":391 + /* "aiohttp/_http_parser.pyx":392 * self._content_encoding = value * * PyByteArray_Resize(self._raw_name, 0) # <<<<<<<<<<<<<< @@ -10588,10 +10636,10 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st */ __pyx_t_2 = __pyx_v_self->_raw_name; __Pyx_INCREF(__pyx_t_2); - __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 391, __pyx_L1_error) + __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 392, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":392 + /* "aiohttp/_http_parser.pyx":393 * * PyByteArray_Resize(self._raw_name, 0) * PyByteArray_Resize(self._raw_value, 0) # <<<<<<<<<<<<<< @@ -10600,10 +10648,10 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st */ __pyx_t_2 = __pyx_v_self->_raw_value; __Pyx_INCREF(__pyx_t_2); - __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if 
(unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 392, __pyx_L1_error) + __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 393, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":393 + /* "aiohttp/_http_parser.pyx":394 * PyByteArray_Resize(self._raw_name, 0) * PyByteArray_Resize(self._raw_value, 0) * self._has_value = False # <<<<<<<<<<<<<< @@ -10612,7 +10660,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st */ __pyx_v_self->_has_value = 0; - /* "aiohttp/_http_parser.pyx":394 + /* "aiohttp/_http_parser.pyx":395 * PyByteArray_Resize(self._raw_value, 0) * self._has_value = False * self._raw_headers.append((raw_name, raw_value)) # <<<<<<<<<<<<<< @@ -10621,20 +10669,20 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st */ if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 394, __pyx_L1_error) + __PYX_ERR(0, 395, __pyx_L1_error) } - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 394, __pyx_L1_error) + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 395, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_v_raw_name); __Pyx_GIVEREF(__pyx_v_raw_name); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_raw_name)) __PYX_ERR(0, 394, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_raw_name)) __PYX_ERR(0, 395, __pyx_L1_error); __Pyx_INCREF(__pyx_v_raw_value); __Pyx_GIVEREF(__pyx_v_raw_value); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_raw_value)) __PYX_ERR(0, 394, __pyx_L1_error); - __pyx_t_6 = __Pyx_PyList_Append(__pyx_v_self->_raw_headers, __pyx_t_2); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 394, __pyx_L1_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_raw_value)) __PYX_ERR(0, 395, __pyx_L1_error); + __pyx_t_6 = __Pyx_PyList_Append(__pyx_v_self->_raw_headers, __pyx_t_2); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 395, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":379 + /* "aiohttp/_http_parser.pyx":380 * * cdef _process_header(self): * if self._raw_name: # <<<<<<<<<<<<<< @@ -10643,7 +10691,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st */ } - /* "aiohttp/_http_parser.pyx":378 + /* "aiohttp/_http_parser.pyx":379 * self._limit = limit * * cdef _process_header(self): # <<<<<<<<<<<<<< @@ -10670,7 +10718,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(st return __pyx_r; } -/* "aiohttp/_http_parser.pyx":396 +/* "aiohttp/_http_parser.pyx":397 * self._raw_headers.append((raw_name, raw_value)) * * cdef _on_header_field(self, char* at, size_t length): # <<<<<<<<<<<<<< @@ -10691,7 +10739,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_header_field", 1); - /* "aiohttp/_http_parser.pyx":399 + /* "aiohttp/_http_parser.pyx":400 * cdef Py_ssize_t size * cdef char *buf * if self._has_value: # <<<<<<<<<<<<<< @@ -10700,18 +10748,18 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s */ if (__pyx_v_self->_has_value) { - /* "aiohttp/_http_parser.pyx":400 + /* "aiohttp/_http_parser.pyx":401 * cdef char *buf * if self._has_value: * self._process_header() # <<<<<<<<<<<<<< * * size = PyByteArray_Size(self._raw_name) */ - 
__pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 400, __pyx_L1_error) + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":399 + /* "aiohttp/_http_parser.pyx":400 * cdef Py_ssize_t size * cdef char *buf * if self._has_value: # <<<<<<<<<<<<<< @@ -10720,7 +10768,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s */ } - /* "aiohttp/_http_parser.pyx":402 + /* "aiohttp/_http_parser.pyx":403 * self._process_header() * * size = PyByteArray_Size(self._raw_name) # <<<<<<<<<<<<<< @@ -10729,11 +10777,11 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s */ __pyx_t_1 = __pyx_v_self->_raw_name; __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = PyByteArray_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 402, __pyx_L1_error) + __pyx_t_2 = PyByteArray_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 403, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_size = __pyx_t_2; - /* "aiohttp/_http_parser.pyx":403 + /* "aiohttp/_http_parser.pyx":404 * * size = PyByteArray_Size(self._raw_name) * PyByteArray_Resize(self._raw_name, size + length) # <<<<<<<<<<<<<< @@ -10742,10 +10790,10 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s */ __pyx_t_1 = __pyx_v_self->_raw_name; __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = PyByteArray_Resize(__pyx_t_1, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 403, __pyx_L1_error) + __pyx_t_3 = PyByteArray_Resize(__pyx_t_1, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 404, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":404 + /* "aiohttp/_http_parser.pyx":405 * size = PyByteArray_Size(self._raw_name) * PyByteArray_Resize(self._raw_name, size + length) * buf = PyByteArray_AsString(self._raw_name) # <<<<<<<<<<<<<< @@ -10757,7 +10805,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s __pyx_v_buf = PyByteArray_AsString(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":405 + /* "aiohttp/_http_parser.pyx":406 * PyByteArray_Resize(self._raw_name, size + length) * buf = PyByteArray_AsString(self._raw_name) * memcpy(buf + size, at, length) # <<<<<<<<<<<<<< @@ -10766,7 +10814,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s */ (void)(memcpy((__pyx_v_buf + __pyx_v_size), __pyx_v_at, __pyx_v_length)); - /* "aiohttp/_http_parser.pyx":396 + /* "aiohttp/_http_parser.pyx":397 * self._raw_headers.append((raw_name, raw_value)) * * cdef _on_header_field(self, char* at, size_t length): # <<<<<<<<<<<<<< @@ -10787,7 +10835,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(s return __pyx_r; } -/* "aiohttp/_http_parser.pyx":407 +/* "aiohttp/_http_parser.pyx":408 * memcpy(buf + size, at, length) * * cdef _on_header_value(self, char* at, size_t length): # <<<<<<<<<<<<<< @@ -10808,7 +10856,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_header_value", 1); - 
/* "aiohttp/_http_parser.pyx":411 + /* "aiohttp/_http_parser.pyx":412 * cdef char *buf * * size = PyByteArray_Size(self._raw_value) # <<<<<<<<<<<<<< @@ -10817,11 +10865,11 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s */ __pyx_t_1 = __pyx_v_self->_raw_value; __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = PyByteArray_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 411, __pyx_L1_error) + __pyx_t_2 = PyByteArray_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 412, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_size = __pyx_t_2; - /* "aiohttp/_http_parser.pyx":412 + /* "aiohttp/_http_parser.pyx":413 * * size = PyByteArray_Size(self._raw_value) * PyByteArray_Resize(self._raw_value, size + length) # <<<<<<<<<<<<<< @@ -10830,10 +10878,10 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s */ __pyx_t_1 = __pyx_v_self->_raw_value; __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = PyByteArray_Resize(__pyx_t_1, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 412, __pyx_L1_error) + __pyx_t_3 = PyByteArray_Resize(__pyx_t_1, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 413, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":413 + /* "aiohttp/_http_parser.pyx":414 * size = PyByteArray_Size(self._raw_value) * PyByteArray_Resize(self._raw_value, size + length) * buf = PyByteArray_AsString(self._raw_value) # <<<<<<<<<<<<<< @@ -10845,7 +10893,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s __pyx_v_buf = PyByteArray_AsString(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":414 + /* "aiohttp/_http_parser.pyx":415 * PyByteArray_Resize(self._raw_value, size + length) * buf = PyByteArray_AsString(self._raw_value) * memcpy(buf + size, at, length) # <<<<<<<<<<<<<< @@ -10854,7 +10902,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s */ (void)(memcpy((__pyx_v_buf + __pyx_v_size), __pyx_v_at, __pyx_v_length)); - /* "aiohttp/_http_parser.pyx":415 + /* "aiohttp/_http_parser.pyx":416 * buf = PyByteArray_AsString(self._raw_value) * memcpy(buf + size, at, length) * self._has_value = True # <<<<<<<<<<<<<< @@ -10863,7 +10911,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s */ __pyx_v_self->_has_value = 1; - /* "aiohttp/_http_parser.pyx":407 + /* "aiohttp/_http_parser.pyx":408 * memcpy(buf + size, at, length) * * cdef _on_header_value(self, char* at, size_t length): # <<<<<<<<<<<<<< @@ -10884,7 +10932,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s return __pyx_r; } -/* "aiohttp/_http_parser.pyx":417 +/* "aiohttp/_http_parser.pyx":418 * self._has_value = True * * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< @@ -10893,14 +10941,15 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(s */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_v_method = NULL; int __pyx_v_should_close; uint8_t __pyx_v_upgrade; int __pyx_v_chunked; PyObject *__pyx_v_raw_headers = NULL; PyObject *__pyx_v_headers = NULL; + PyObject *__pyx_v_allowed = NULL; PyObject *__pyx_v_encoding = NULL; PyObject *__pyx_v_enc = NULL; + PyObject *__pyx_v_method = NULL; PyObject *__pyx_v_msg = NULL; 
PyObject *__pyx_v_payload = NULL; PyObject *__pyx_r = NULL; @@ -10911,40 +10960,28 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple PyObject *__pyx_t_4 = NULL; int __pyx_t_5; int __pyx_t_6; - int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; int __pyx_t_9; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_headers_complete", 1); - /* "aiohttp/_http_parser.pyx":418 + /* "aiohttp/_http_parser.pyx":419 * * cdef _on_headers_complete(self): * self._process_header() # <<<<<<<<<<<<<< * - * method = http_method_str(self._cparser.method) - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 418, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":420 - * self._process_header() - * - * method = http_method_str(self._cparser.method) # <<<<<<<<<<<<<< * should_close = not cparser.llhttp_should_keep_alive(self._cparser) - * upgrade = self._cparser.upgrade */ - __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser_http_method_str(__pyx_v_self->_cparser->method); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 420, __pyx_L1_error) + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_v_method = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":421 + * self._process_header() * - * method = http_method_str(self._cparser.method) * should_close = not cparser.llhttp_should_keep_alive(self._cparser) # <<<<<<<<<<<<<< * upgrade = self._cparser.upgrade * chunked = self._cparser.flags & cparser.F_CHUNKED @@ -10952,7 +10989,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_v_should_close = (!(llhttp_should_keep_alive(__pyx_v_self->_cparser) != 0)); /* "aiohttp/_http_parser.pyx":422 - * method = http_method_str(self._cparser.method) + * * should_close = not cparser.llhttp_should_keep_alive(self._cparser) * upgrade = self._cparser.upgrade # <<<<<<<<<<<<<< * chunked = self._cparser.flags & cparser.F_CHUNKED @@ -10991,7 +11028,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple * raw_headers = tuple(self._raw_headers) * headers = CIMultiDictProxy(self._headers) # <<<<<<<<<<<<<< * - * if upgrade or self._cparser.method == cparser.HTTP_CONNECT: + * if self._cparser.type == cparser.HTTP_REQUEST: */ __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy); __pyx_t_3 = __pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy; __pyx_t_4 = NULL; @@ -11022,85 +11059,202 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple /* "aiohttp/_http_parser.pyx":428 * headers = CIMultiDictProxy(self._headers) * - * if upgrade or self._cparser.method == cparser.HTTP_CONNECT: # <<<<<<<<<<<<<< - * self._upgraded = True - * + * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + * if allowed or self._cparser.method == cparser.HTTP_CONNECT: */ - __pyx_t_7 = (__pyx_v_upgrade != 0); - if (!__pyx_t_7) { - } else { - __pyx_t_6 = __pyx_t_7; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_7 = 
(__pyx_v_self->_cparser->method == HTTP_CONNECT); - __pyx_t_6 = __pyx_t_7; - __pyx_L4_bool_binop_done:; + __pyx_t_6 = (__pyx_v_self->_cparser->type == HTTP_REQUEST); if (__pyx_t_6) { /* "aiohttp/_http_parser.pyx":429 * - * if upgrade or self._cparser.method == cparser.HTTP_CONNECT: - * self._upgraded = True # <<<<<<<<<<<<<< - * - * # do not support old websocket spec + * if self._cparser.type == cparser.HTTP_REQUEST: + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES # <<<<<<<<<<<<<< + * if allowed or self._cparser.method == cparser.HTTP_CONNECT: + * self._upgraded = True */ - __pyx_v_self->_upgraded = 1; + if (__pyx_v_upgrade) { + } else { + __pyx_t_3 = __Pyx_PyInt_From_uint8_t(__pyx_v_upgrade); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_headers, __pyx_n_s_get); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_lower); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, NULL}; + __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 0+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_ALLOWED_UPGRADES); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_t_3, __pyx_t_4, Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + __pyx_L4_bool_binop_done:; + __pyx_v_allowed = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":430 + * if self._cparser.type == cparser.HTTP_REQUEST: + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + * if allowed or self._cparser.method == cparser.HTTP_CONNECT: # <<<<<<<<<<<<<< + * self._upgraded = True + * else: + */ + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_allowed); if (unlikely((__pyx_t_8 < 0))) __PYX_ERR(0, 430, __pyx_L1_error) + if (!__pyx_t_8) { + } else { + __pyx_t_6 = __pyx_t_8; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_8 = (__pyx_v_self->_cparser->method == HTTP_CONNECT); + __pyx_t_6 = __pyx_t_8; + __pyx_L7_bool_binop_done:; + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":431 + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + * if allowed or 
self._cparser.method == cparser.HTTP_CONNECT: + * self._upgraded = True # <<<<<<<<<<<<<< + * else: + * if upgrade and self._cparser.status_code == 101: + */ + __pyx_v_self->_upgraded = 1; + + /* "aiohttp/_http_parser.pyx":430 + * if self._cparser.type == cparser.HTTP_REQUEST: + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + * if allowed or self._cparser.method == cparser.HTTP_CONNECT: # <<<<<<<<<<<<<< + * self._upgraded = True + * else: + */ + } /* "aiohttp/_http_parser.pyx":428 * headers = CIMultiDictProxy(self._headers) * - * if upgrade or self._cparser.method == cparser.HTTP_CONNECT: # <<<<<<<<<<<<<< - * self._upgraded = True + * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + * if allowed or self._cparser.method == cparser.HTTP_CONNECT: + */ + goto __pyx_L3; + } + + /* "aiohttp/_http_parser.pyx":433 + * self._upgraded = True + * else: + * if upgrade and self._cparser.status_code == 101: # <<<<<<<<<<<<<< + * self._upgraded = True * */ + /*else*/ { + __pyx_t_8 = (__pyx_v_upgrade != 0); + if (__pyx_t_8) { + } else { + __pyx_t_6 = __pyx_t_8; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_8 = (__pyx_v_self->_cparser->status_code == 0x65); + __pyx_t_6 = __pyx_t_8; + __pyx_L10_bool_binop_done:; + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":434 + * else: + * if upgrade and self._cparser.status_code == 101: + * self._upgraded = True # <<<<<<<<<<<<<< + * + * # do not support old websocket spec + */ + __pyx_v_self->_upgraded = 1; + + /* "aiohttp/_http_parser.pyx":433 + * self._upgraded = True + * else: + * if upgrade and self._cparser.status_code == 101: # <<<<<<<<<<<<<< + * self._upgraded = True + * + */ + } } + __pyx_L3:; - /* "aiohttp/_http_parser.pyx":432 + /* "aiohttp/_http_parser.pyx":437 * * # do not support old websocket spec * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< * raise InvalidHeader(SEC_WEBSOCKET_KEY1) * */ - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_v_headers, Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 432, __pyx_L1_error) + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_v_headers, Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 437, __pyx_L1_error) if (unlikely(__pyx_t_6)) { - /* "aiohttp/_http_parser.pyx":433 + /* "aiohttp/_http_parser.pyx":438 * # do not support old websocket spec * if SEC_WEBSOCKET_KEY1 in headers: * raise InvalidHeader(SEC_WEBSOCKET_KEY1) # <<<<<<<<<<<<<< * * encoding = None */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_InvalidHeader); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 433, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_InvalidHeader); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = NULL; __pyx_t_5 = 0; #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); + if (unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); + __Pyx_DECREF_SET(__pyx_t_4, function); __pyx_t_5 = 1; } } #endif { - PyObject 
*__pyx_callargs[2] = {__pyx_t_4, __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L1_error) + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 433, __pyx_L1_error) + __PYX_ERR(0, 438, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":432 + /* "aiohttp/_http_parser.pyx":437 * * # do not support old websocket spec * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< @@ -11109,7 +11263,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ } - /* "aiohttp/_http_parser.pyx":435 + /* "aiohttp/_http_parser.pyx":440 * raise InvalidHeader(SEC_WEBSOCKET_KEY1) * * encoding = None # <<<<<<<<<<<<<< @@ -11119,7 +11273,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_INCREF(Py_None); __pyx_v_encoding = Py_None; - /* "aiohttp/_http_parser.pyx":436 + /* "aiohttp/_http_parser.pyx":441 * * encoding = None * enc = self._content_encoding # <<<<<<<<<<<<<< @@ -11131,7 +11285,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_v_enc = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":437 + /* "aiohttp/_http_parser.pyx":442 * encoding = None * enc = self._content_encoding * if enc is not None: # <<<<<<<<<<<<<< @@ -11141,7 +11295,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_t_6 = (__pyx_v_enc != Py_None); if (__pyx_t_6) { - /* "aiohttp/_http_parser.pyx":438 + /* "aiohttp/_http_parser.pyx":443 * enc = self._content_encoding * if enc is not None: * self._content_encoding = None # <<<<<<<<<<<<<< @@ -11154,19 +11308,19 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_DECREF(__pyx_v_self->_content_encoding); __pyx_v_self->_content_encoding = ((PyObject*)Py_None); - /* "aiohttp/_http_parser.pyx":439 + /* "aiohttp/_http_parser.pyx":444 * if enc is not None: * self._content_encoding = None * enc = enc.lower() # <<<<<<<<<<<<<< * if enc in ('gzip', 'deflate', 'br'): * encoding = enc */ - __pyx_t_1 = __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyUnicode_Type_lower, __pyx_v_enc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 439, __pyx_L1_error) + __pyx_t_1 = __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyUnicode_Type_lower, __pyx_v_enc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 444, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF_SET(__pyx_v_enc, __pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":440 + /* "aiohttp/_http_parser.pyx":445 * self._content_encoding = None * enc = enc.lower() * if enc in ('gzip', 'deflate', 'br'): # <<<<<<<<<<<<<< @@ -11175,26 +11329,26 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ __Pyx_INCREF(__pyx_v_enc); __pyx_t_1 = __pyx_v_enc; - __pyx_t_7 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_gzip, Py_EQ)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 440, __pyx_L1_error) - if (!__pyx_t_7) { + __pyx_t_8 = 
(__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_gzip, Py_EQ)); if (unlikely((__pyx_t_8 < 0))) __PYX_ERR(0, 445, __pyx_L1_error) + if (!__pyx_t_8) { } else { - __pyx_t_6 = __pyx_t_7; - goto __pyx_L9_bool_binop_done; + __pyx_t_6 = __pyx_t_8; + goto __pyx_L15_bool_binop_done; } - __pyx_t_7 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_deflate, Py_EQ)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 440, __pyx_L1_error) - if (!__pyx_t_7) { + __pyx_t_8 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_deflate, Py_EQ)); if (unlikely((__pyx_t_8 < 0))) __PYX_ERR(0, 445, __pyx_L1_error) + if (!__pyx_t_8) { } else { - __pyx_t_6 = __pyx_t_7; - goto __pyx_L9_bool_binop_done; + __pyx_t_6 = __pyx_t_8; + goto __pyx_L15_bool_binop_done; } - __pyx_t_7 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_br, Py_EQ)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 440, __pyx_L1_error) - __pyx_t_6 = __pyx_t_7; - __pyx_L9_bool_binop_done:; + __pyx_t_8 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_br, Py_EQ)); if (unlikely((__pyx_t_8 < 0))) __PYX_ERR(0, 445, __pyx_L1_error) + __pyx_t_6 = __pyx_t_8; + __pyx_L15_bool_binop_done:; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_7 = __pyx_t_6; - if (__pyx_t_7) { + __pyx_t_8 = __pyx_t_6; + if (__pyx_t_8) { - /* "aiohttp/_http_parser.pyx":441 + /* "aiohttp/_http_parser.pyx":446 * enc = enc.lower() * if enc in ('gzip', 'deflate', 'br'): * encoding = enc # <<<<<<<<<<<<<< @@ -11204,7 +11358,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_INCREF(__pyx_v_enc); __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_v_enc); - /* "aiohttp/_http_parser.pyx":440 + /* "aiohttp/_http_parser.pyx":445 * self._content_encoding = None * enc = enc.lower() * if enc in ('gzip', 'deflate', 'br'): # <<<<<<<<<<<<<< @@ -11213,7 +11367,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ } - /* "aiohttp/_http_parser.pyx":437 + /* "aiohttp/_http_parser.pyx":442 * encoding = None * enc = self._content_encoding * if enc is not None: # <<<<<<<<<<<<<< @@ -11222,18 +11376,30 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ } - /* "aiohttp/_http_parser.pyx":443 + /* "aiohttp/_http_parser.pyx":448 * encoding = enc * * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< + * method = http_method_str(self._cparser.method) * msg = _new_request_message( - * method, self._path, */ - __pyx_t_7 = (__pyx_v_self->_cparser->type == HTTP_REQUEST); - if (__pyx_t_7) { + __pyx_t_8 = (__pyx_v_self->_cparser->type == HTTP_REQUEST); + if (__pyx_t_8) { - /* "aiohttp/_http_parser.pyx":445 + /* "aiohttp/_http_parser.pyx":449 + * * if self._cparser.type == cparser.HTTP_REQUEST: + * method = http_method_str(self._cparser.method) # <<<<<<<<<<<<<< + * msg = _new_request_message( + * method, self._path, + */ + __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser_http_method_str(__pyx_v_self->_cparser->method); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 449, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_method = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":451 + * method = http_method_str(self._cparser.method) * msg = _new_request_message( * method, self._path, # <<<<<<<<<<<<<< * self.http_version(), headers, raw_headers, @@ -11242,52 +11408,52 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_t_1 = __pyx_v_self->_path; __Pyx_INCREF(__pyx_t_1); - /* "aiohttp/_http_parser.pyx":446 + /* "aiohttp/_http_parser.pyx":452 * msg = _new_request_message( * method, 
self._path, * self.http_version(), headers, raw_headers, # <<<<<<<<<<<<<< * should_close, encoding, upgrade, chunked, self._url) * else: */ - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 446, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 452, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); - /* "aiohttp/_http_parser.pyx":447 + /* "aiohttp/_http_parser.pyx":453 * method, self._path, * self.http_version(), headers, raw_headers, * should_close, encoding, upgrade, chunked, self._url) # <<<<<<<<<<<<<< * else: * msg = _new_response_message( */ - __pyx_t_4 = __pyx_v_self->_url; - __Pyx_INCREF(__pyx_t_4); + __pyx_t_3 = __pyx_v_self->_url; + __Pyx_INCREF(__pyx_t_3); - /* "aiohttp/_http_parser.pyx":444 - * + /* "aiohttp/_http_parser.pyx":450 * if self._cparser.type == cparser.HTTP_REQUEST: + * method = http_method_str(self._cparser.method) * msg = _new_request_message( # <<<<<<<<<<<<<< * method, self._path, * self.http_version(), headers, raw_headers, */ - __pyx_t_8 = __pyx_f_7aiohttp_12_http_parser__new_request_message(__pyx_v_method, ((PyObject*)__pyx_t_1), __pyx_t_3, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked, __pyx_t_4); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 444, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __pyx_f_7aiohttp_12_http_parser__new_request_message(__pyx_v_method, ((PyObject*)__pyx_t_1), __pyx_t_4, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked, __pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 450, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_v_msg = __pyx_t_8; - __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_msg = __pyx_t_7; + __pyx_t_7 = 0; - /* "aiohttp/_http_parser.pyx":443 + /* "aiohttp/_http_parser.pyx":448 * encoding = enc * * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< + * method = http_method_str(self._cparser.method) * msg = _new_request_message( - * method, self._path, */ - goto __pyx_L12; + goto __pyx_L18; } - /* "aiohttp/_http_parser.pyx":449 + /* "aiohttp/_http_parser.pyx":455 * should_close, encoding, upgrade, chunked, self._url) * else: * msg = _new_response_message( # <<<<<<<<<<<<<< @@ -11296,35 +11462,35 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ /*else*/ { - /* "aiohttp/_http_parser.pyx":450 + /* "aiohttp/_http_parser.pyx":456 * else: * msg = _new_response_message( * self.http_version(), self._cparser.status_code, self._reason, # <<<<<<<<<<<<<< * headers, raw_headers, should_close, encoding, * upgrade, chunked) */ - __pyx_t_8 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 450, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_4 = __pyx_v_self->_reason; - __Pyx_INCREF(__pyx_t_4); + __pyx_t_7 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 456, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __pyx_v_self->_reason; + __Pyx_INCREF(__pyx_t_3); - /* "aiohttp/_http_parser.pyx":449 + /* "aiohttp/_http_parser.pyx":455 * should_close, encoding, upgrade, chunked, self._url) * else: * msg = 
_new_response_message( # <<<<<<<<<<<<<< * self.http_version(), self._cparser.status_code, self._reason, * headers, raw_headers, should_close, encoding, */ - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser__new_response_message(__pyx_t_8, __pyx_v_self->_cparser->status_code, ((PyObject*)__pyx_t_4), __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_v_msg = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser__new_response_message(__pyx_t_7, __pyx_v_self->_cparser->status_code, ((PyObject*)__pyx_t_3), __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 455, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_msg = __pyx_t_4; + __pyx_t_4 = 0; } - __pyx_L12:; + __pyx_L18:; - /* "aiohttp/_http_parser.pyx":455 + /* "aiohttp/_http_parser.pyx":461 * * if ( * ULLONG_MAX > self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< @@ -11337,17 +11503,17 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple } if (!__pyx_t_6) { } else { - __pyx_t_7 = __pyx_t_6; - goto __pyx_L14_bool_binop_done; + __pyx_t_8 = __pyx_t_6; + goto __pyx_L20_bool_binop_done; } __pyx_t_6 = (__pyx_v_chunked != 0); if (!__pyx_t_6) { } else { - __pyx_t_7 = __pyx_t_6; - goto __pyx_L14_bool_binop_done; + __pyx_t_8 = __pyx_t_6; + goto __pyx_L20_bool_binop_done; } - /* "aiohttp/_http_parser.pyx":456 + /* "aiohttp/_http_parser.pyx":462 * if ( * ULLONG_MAX > self._cparser.content_length > 0 or chunked or * self._cparser.method == cparser.HTTP_CONNECT or # <<<<<<<<<<<<<< @@ -11357,11 +11523,11 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_t_6 = (__pyx_v_self->_cparser->method == HTTP_CONNECT); if (!__pyx_t_6) { } else { - __pyx_t_7 = __pyx_t_6; - goto __pyx_L14_bool_binop_done; + __pyx_t_8 = __pyx_t_6; + goto __pyx_L20_bool_binop_done; } - /* "aiohttp/_http_parser.pyx":457 + /* "aiohttp/_http_parser.pyx":463 * ULLONG_MAX > self._cparser.content_length > 0 or chunked or * self._cparser.method == cparser.HTTP_CONNECT or * (self._cparser.status_code >= 199 and # <<<<<<<<<<<<<< @@ -11371,11 +11537,11 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_t_6 = (__pyx_v_self->_cparser->status_code >= 0xC7); if (__pyx_t_6) { } else { - __pyx_t_7 = __pyx_t_6; - goto __pyx_L14_bool_binop_done; + __pyx_t_8 = __pyx_t_6; + goto __pyx_L20_bool_binop_done; } - /* "aiohttp/_http_parser.pyx":458 + /* "aiohttp/_http_parser.pyx":464 * self._cparser.method == cparser.HTTP_CONNECT or * (self._cparser.status_code >= 199 and * self._cparser.content_length == 0 and # <<<<<<<<<<<<<< @@ -11385,91 +11551,91 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_t_6 = (__pyx_v_self->_cparser->content_length == 0); if (__pyx_t_6) { } else { - __pyx_t_7 = __pyx_t_6; - goto __pyx_L14_bool_binop_done; + __pyx_t_8 = __pyx_t_6; + goto __pyx_L20_bool_binop_done; } - /* "aiohttp/_http_parser.pyx":459 + /* "aiohttp/_http_parser.pyx":465 * (self._cparser.status_code >= 199 and * self._cparser.content_length == 0 and * self._read_until_eof) # <<<<<<<<<<<<<< * ): * payload = StreamReader( */ 
- __pyx_t_7 = __pyx_v_self->_read_until_eof; - __pyx_L14_bool_binop_done:; + __pyx_t_8 = __pyx_v_self->_read_until_eof; + __pyx_L20_bool_binop_done:; - /* "aiohttp/_http_parser.pyx":454 + /* "aiohttp/_http_parser.pyx":460 * upgrade, chunked) * * if ( # <<<<<<<<<<<<<< * ULLONG_MAX > self._cparser.content_length > 0 or chunked or * self._cparser.method == cparser.HTTP_CONNECT or */ - if (__pyx_t_7) { + if (__pyx_t_8) { - /* "aiohttp/_http_parser.pyx":461 + /* "aiohttp/_http_parser.pyx":467 * self._read_until_eof) * ): * payload = StreamReader( # <<<<<<<<<<<<<< * self._protocol, timer=self._timer, loop=self._loop, * limit=self._limit) */ - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 461, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 467, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); __Pyx_INCREF(__pyx_v_self->_protocol); __Pyx_GIVEREF(__pyx_v_self->_protocol); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->_protocol)) __PYX_ERR(0, 461, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->_protocol)) __PYX_ERR(0, 467, __pyx_L1_error); - /* "aiohttp/_http_parser.pyx":462 + /* "aiohttp/_http_parser.pyx":468 * ): * payload = StreamReader( * self._protocol, timer=self._timer, loop=self._loop, # <<<<<<<<<<<<<< * limit=self._limit) * else: */ - __pyx_t_4 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 462, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_timer, __pyx_v_self->_timer) < 0) __PYX_ERR(0, 462, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_loop, __pyx_v_self->_loop) < 0) __PYX_ERR(0, 462, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_timer, __pyx_v_self->_timer) < 0) __PYX_ERR(0, 468, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_loop, __pyx_v_self->_loop) < 0) __PYX_ERR(0, 468, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":463 + /* "aiohttp/_http_parser.pyx":469 * payload = StreamReader( * self._protocol, timer=self._timer, loop=self._loop, * limit=self._limit) # <<<<<<<<<<<<<< * else: * payload = EMPTY_PAYLOAD */ - __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_self->_limit); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 463, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_limit, __pyx_t_8) < 0) __PYX_ERR(0, 462, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_self->_limit); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 469, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_limit, __pyx_t_7) < 0) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "aiohttp/_http_parser.pyx":461 + /* "aiohttp/_http_parser.pyx":467 * self._read_until_eof) * ): * payload = StreamReader( # <<<<<<<<<<<<<< * self._protocol, timer=self._timer, loop=self._loop, * limit=self._limit) */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 461, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_4, __pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 467, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - 
__pyx_v_payload = __pyx_t_8; - __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_payload = __pyx_t_7; + __pyx_t_7 = 0; - /* "aiohttp/_http_parser.pyx":454 + /* "aiohttp/_http_parser.pyx":460 * upgrade, chunked) * * if ( # <<<<<<<<<<<<<< * ULLONG_MAX > self._cparser.content_length > 0 or chunked or * self._cparser.method == cparser.HTTP_CONNECT or */ - goto __pyx_L13; + goto __pyx_L19; } - /* "aiohttp/_http_parser.pyx":465 + /* "aiohttp/_http_parser.pyx":471 * limit=self._limit) * else: * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< @@ -11480,9 +11646,9 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); __pyx_v_payload = __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD; } - __pyx_L13:; + __pyx_L19:; - /* "aiohttp/_http_parser.pyx":467 + /* "aiohttp/_http_parser.pyx":473 * payload = EMPTY_PAYLOAD * * self._payload = payload # <<<<<<<<<<<<<< @@ -11495,7 +11661,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = __pyx_v_payload; - /* "aiohttp/_http_parser.pyx":468 + /* "aiohttp/_http_parser.pyx":474 * * self._payload = payload * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< @@ -11505,14 +11671,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __pyx_t_6 = (__pyx_v_encoding != Py_None); if (__pyx_t_6) { } else { - __pyx_t_7 = __pyx_t_6; - goto __pyx_L21_bool_binop_done; + __pyx_t_8 = __pyx_t_6; + goto __pyx_L27_bool_binop_done; } - __pyx_t_7 = __pyx_v_self->_auto_decompress; - __pyx_L21_bool_binop_done:; - if (__pyx_t_7) { + __pyx_t_8 = __pyx_v_self->_auto_decompress; + __pyx_L27_bool_binop_done:; + if (__pyx_t_8) { - /* "aiohttp/_http_parser.pyx":469 + /* "aiohttp/_http_parser.pyx":475 * self._payload = payload * if encoding is not None and self._auto_decompress: * self._payload = DeflateBuffer(payload, encoding) # <<<<<<<<<<<<<< @@ -11520,35 +11686,35 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple * if not self._response_with_body: */ __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer); - __pyx_t_4 = __pyx_v_7aiohttp_12_http_parser_DeflateBuffer; __pyx_t_3 = NULL; + __pyx_t_3 = __pyx_v_7aiohttp_12_http_parser_DeflateBuffer; __pyx_t_4 = NULL; __pyx_t_5 = 0; #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_3); + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); + __Pyx_DECREF_SET(__pyx_t_3, function); __pyx_t_5 = 1; } } #endif { - PyObject *__pyx_callargs[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; - __pyx_t_8 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 2+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 469, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + PyObject *__pyx_callargs[3] = {__pyx_t_4, __pyx_v_payload, __pyx_v_encoding}; + __pyx_t_7 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 2+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 
475, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } - __Pyx_GIVEREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_7); __Pyx_GOTREF(__pyx_v_self->_payload); __Pyx_DECREF(__pyx_v_self->_payload); - __pyx_v_self->_payload = __pyx_t_8; - __pyx_t_8 = 0; + __pyx_v_self->_payload = __pyx_t_7; + __pyx_t_7 = 0; - /* "aiohttp/_http_parser.pyx":468 + /* "aiohttp/_http_parser.pyx":474 * * self._payload = payload * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< @@ -11557,17 +11723,17 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ } - /* "aiohttp/_http_parser.pyx":471 + /* "aiohttp/_http_parser.pyx":477 * self._payload = DeflateBuffer(payload, encoding) * * if not self._response_with_body: # <<<<<<<<<<<<<< * payload = EMPTY_PAYLOAD * */ - __pyx_t_7 = (!__pyx_v_self->_response_with_body); - if (__pyx_t_7) { + __pyx_t_8 = (!__pyx_v_self->_response_with_body); + if (__pyx_t_8) { - /* "aiohttp/_http_parser.pyx":472 + /* "aiohttp/_http_parser.pyx":478 * * if not self._response_with_body: * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< @@ -11577,7 +11743,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); __Pyx_DECREF_SET(__pyx_v_payload, __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); - /* "aiohttp/_http_parser.pyx":471 + /* "aiohttp/_http_parser.pyx":477 * self._payload = DeflateBuffer(payload, encoding) * * if not self._response_with_body: # <<<<<<<<<<<<<< @@ -11586,7 +11752,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ } - /* "aiohttp/_http_parser.pyx":474 + /* "aiohttp/_http_parser.pyx":480 * payload = EMPTY_PAYLOAD * * self._messages.append((msg, payload)) # <<<<<<<<<<<<<< @@ -11595,20 +11761,20 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple */ if (unlikely(__pyx_v_self->_messages == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 474, __pyx_L1_error) + __PYX_ERR(0, 480, __pyx_L1_error) } - __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 474, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 480, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(__pyx_v_msg); __Pyx_GIVEREF(__pyx_v_msg); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_msg)) __PYX_ERR(0, 474, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_msg)) __PYX_ERR(0, 480, __pyx_L1_error); __Pyx_INCREF(__pyx_v_payload); __Pyx_GIVEREF(__pyx_v_payload); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_v_payload)) __PYX_ERR(0, 474, __pyx_L1_error); - __pyx_t_9 = __Pyx_PyList_Append(__pyx_v_self->_messages, __pyx_t_8); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 474, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_v_payload)) __PYX_ERR(0, 480, __pyx_L1_error); + __pyx_t_9 = __Pyx_PyList_Append(__pyx_v_self->_messages, __pyx_t_7); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 480, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "aiohttp/_http_parser.pyx":417 + /* "aiohttp/_http_parser.pyx":418 * self._has_value = True * * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< @@ -11623,15 +11789,16 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_3); 
__Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; - __Pyx_XDECREF(__pyx_v_method); __Pyx_XDECREF(__pyx_v_raw_headers); __Pyx_XDECREF(__pyx_v_headers); + __Pyx_XDECREF(__pyx_v_allowed); __Pyx_XDECREF(__pyx_v_encoding); __Pyx_XDECREF(__pyx_v_enc); + __Pyx_XDECREF(__pyx_v_method); __Pyx_XDECREF(__pyx_v_msg); __Pyx_XDECREF(__pyx_v_payload); __Pyx_XGIVEREF(__pyx_r); @@ -11639,7 +11806,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_comple return __pyx_r; } -/* "aiohttp/_http_parser.pyx":476 +/* "aiohttp/_http_parser.pyx":482 * self._messages.append((msg, payload)) * * cdef _on_message_complete(self): # <<<<<<<<<<<<<< @@ -11659,14 +11826,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_comple int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_message_complete", 1); - /* "aiohttp/_http_parser.pyx":477 + /* "aiohttp/_http_parser.pyx":483 * * cdef _on_message_complete(self): * self._payload.feed_eof() # <<<<<<<<<<<<<< * self._payload = None * */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 477, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 483, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; @@ -11686,13 +11853,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_comple PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 477, __pyx_L1_error) + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 483, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":478 + /* "aiohttp/_http_parser.pyx":484 * cdef _on_message_complete(self): * self._payload.feed_eof() * self._payload = None # <<<<<<<<<<<<<< @@ -11705,7 +11872,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_comple __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = Py_None; - /* "aiohttp/_http_parser.pyx":476 + /* "aiohttp/_http_parser.pyx":482 * self._messages.append((msg, payload)) * * cdef _on_message_complete(self): # <<<<<<<<<<<<<< @@ -11728,7 +11895,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_comple return __pyx_r; } -/* "aiohttp/_http_parser.pyx":480 +/* "aiohttp/_http_parser.pyx":486 * self._payload = None * * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< @@ -11748,14 +11915,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(s int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_chunk_header", 1); - /* "aiohttp/_http_parser.pyx":481 + /* "aiohttp/_http_parser.pyx":487 * * cdef _on_chunk_header(self): * self._payload.begin_http_chunk_receiving() # <<<<<<<<<<<<<< * * cdef _on_chunk_complete(self): */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_begin_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 481, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_begin_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 487, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; @@ -11775,13 +11942,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(s PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 481, __pyx_L1_error) + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 487, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":480 + /* "aiohttp/_http_parser.pyx":486 * self._payload = None * * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< @@ -11804,7 +11971,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(s return __pyx_r; } -/* "aiohttp/_http_parser.pyx":483 +/* "aiohttp/_http_parser.pyx":489 * self._payload.begin_http_chunk_receiving() * * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< @@ -11824,14 +11991,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_chunk_complete", 1); - /* "aiohttp/_http_parser.pyx":484 + /* "aiohttp/_http_parser.pyx":490 * * cdef _on_chunk_complete(self): * self._payload.end_http_chunk_receiving() # <<<<<<<<<<<<<< * * cdef object _on_status_complete(self): */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_end_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 484, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_end_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 490, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; @@ -11851,13 +12018,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 484, __pyx_L1_error) + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 490, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":483 + /* "aiohttp/_http_parser.pyx":489 * self._payload.begin_http_chunk_receiving() * * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< @@ -11880,7 +12047,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete return __pyx_r; } -/* "aiohttp/_http_parser.pyx":486 +/* "aiohttp/_http_parser.pyx":492 * self._payload.end_http_chunk_receiving() * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< @@ -11900,7 +12067,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complet return __pyx_r; } -/* "aiohttp/_http_parser.pyx":489 +/* "aiohttp/_http_parser.pyx":495 * pass * * cdef inline http_version(self): # <<<<<<<<<<<<<< @@ -11925,7 +12092,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http int __pyx_clineno = 0; __Pyx_RefNannySetupContext("http_version", 1); - /* "aiohttp/_http_parser.pyx":490 + /* "aiohttp/_http_parser.pyx":496 * * cdef inline http_version(self): * cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<< @@ -11935,7 +12102,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http __pyx_t_1 = __pyx_v_self->_cparser; __pyx_v_parser = __pyx_t_1; - /* 
"aiohttp/_http_parser.pyx":492 + /* "aiohttp/_http_parser.pyx":498 * cdef cparser.llhttp_t* parser = self._cparser * * if parser.http_major == 1: # <<<<<<<<<<<<<< @@ -11945,7 +12112,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http __pyx_t_2 = (__pyx_v_parser->http_major == 1); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":493 + /* "aiohttp/_http_parser.pyx":499 * * if parser.http_major == 1: * if parser.http_minor == 0: # <<<<<<<<<<<<<< @@ -11955,7 +12122,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http switch (__pyx_v_parser->http_minor) { case 0: - /* "aiohttp/_http_parser.pyx":494 + /* "aiohttp/_http_parser.pyx":500 * if parser.http_major == 1: * if parser.http_minor == 0: * return HttpVersion10 # <<<<<<<<<<<<<< @@ -11967,7 +12134,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http __pyx_r = __pyx_v_7aiohttp_12_http_parser_HttpVersion10; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":493 + /* "aiohttp/_http_parser.pyx":499 * * if parser.http_major == 1: * if parser.http_minor == 0: # <<<<<<<<<<<<<< @@ -11977,7 +12144,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http break; case 1: - /* "aiohttp/_http_parser.pyx":496 + /* "aiohttp/_http_parser.pyx":502 * return HttpVersion10 * elif parser.http_minor == 1: * return HttpVersion11 # <<<<<<<<<<<<<< @@ -11989,7 +12156,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http __pyx_r = __pyx_v_7aiohttp_12_http_parser_HttpVersion11; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":495 + /* "aiohttp/_http_parser.pyx":501 * if parser.http_minor == 0: * return HttpVersion10 * elif parser.http_minor == 1: # <<<<<<<<<<<<<< @@ -12000,7 +12167,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http default: break; } - /* "aiohttp/_http_parser.pyx":492 + /* "aiohttp/_http_parser.pyx":498 * cdef cparser.llhttp_t* parser = self._cparser * * if parser.http_major == 1: # <<<<<<<<<<<<<< @@ -12009,7 +12176,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http */ } - /* "aiohttp/_http_parser.pyx":498 + /* "aiohttp/_http_parser.pyx":504 * return HttpVersion11 * * return HttpVersion(parser.http_major, parser.http_minor) # <<<<<<<<<<<<<< @@ -12017,9 +12184,9 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http * ### Public API ### */ __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 498, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 504, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_minor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 498, __pyx_L1_error) + __pyx_t_5 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_minor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 504, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion); __pyx_t_6 = __pyx_v_7aiohttp_12_http_parser_HttpVersion; __pyx_t_7 = NULL; @@ -12042,7 +12209,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 498, __pyx_L1_error) + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 504, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } @@ -12050,7 +12217,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":489 + /* "aiohttp/_http_parser.pyx":495 * pass * * cdef inline http_version(self): # <<<<<<<<<<<<<< @@ -12073,7 +12240,7 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http return __pyx_r; } -/* "aiohttp/_http_parser.pyx":502 +/* "aiohttp/_http_parser.pyx":508 * ### Public API ### * * def feed_eof(self): # <<<<<<<<<<<<<< @@ -12137,7 +12304,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct int __pyx_clineno = 0; __Pyx_RefNannySetupContext("feed_eof", 1); - /* "aiohttp/_http_parser.pyx":505 + /* "aiohttp/_http_parser.pyx":511 * cdef bytes desc * * if self._payload is not None: # <<<<<<<<<<<<<< @@ -12147,7 +12314,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_t_1 = (__pyx_v_self->_payload != Py_None); if (__pyx_t_1) { - /* "aiohttp/_http_parser.pyx":506 + /* "aiohttp/_http_parser.pyx":512 * * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< @@ -12157,14 +12324,14 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_t_1 = ((__pyx_v_self->_cparser->flags & F_CHUNKED) != 0); if (unlikely(__pyx_t_1)) { - /* "aiohttp/_http_parser.pyx":507 + /* "aiohttp/_http_parser.pyx":513 * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: * raise TransferEncodingError( # <<<<<<<<<<<<<< * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENT_LENGTH: */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TransferEncodingError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 507, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TransferEncodingError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 513, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; @@ -12184,15 +12351,15 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_kp_u_Not_enough_data_for_satisfy_tran}; __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 507, __pyx_L1_error) + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 513, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(0, 507, __pyx_L1_error) + __PYX_ERR(0, 513, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":506 + /* "aiohttp/_http_parser.pyx":512 * * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< @@ -12201,7 +12368,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct */ } - /* "aiohttp/_http_parser.pyx":509 + /* "aiohttp/_http_parser.pyx":515 * raise TransferEncodingError( * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENT_LENGTH: # <<<<<<<<<<<<<< @@ -12211,14 +12378,14 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_t_1 = ((__pyx_v_self->_cparser->flags & F_CONTENT_LENGTH) != 0); if (unlikely(__pyx_t_1)) { - /* "aiohttp/_http_parser.pyx":510 + /* "aiohttp/_http_parser.pyx":516 * "Not enough data for satisfy 
transfer length header.") * elif self._cparser.flags & cparser.F_CONTENT_LENGTH: * raise ContentLengthError( # <<<<<<<<<<<<<< * "Not enough data for satisfy content length header.") * elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_ContentLengthError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 510, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_ContentLengthError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 516, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; @@ -12238,15 +12405,15 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_kp_u_Not_enough_data_for_satisfy_cont}; __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 510, __pyx_L1_error) + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 516, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(0, 510, __pyx_L1_error) + __PYX_ERR(0, 516, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":509 + /* "aiohttp/_http_parser.pyx":515 * raise TransferEncodingError( * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENT_LENGTH: # <<<<<<<<<<<<<< @@ -12255,7 +12422,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct */ } - /* "aiohttp/_http_parser.pyx":512 + /* "aiohttp/_http_parser.pyx":518 * raise ContentLengthError( * "Not enough data for satisfy content length header.") * elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: # <<<<<<<<<<<<<< @@ -12265,28 +12432,28 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_t_1 = (llhttp_get_errno(__pyx_v_self->_cparser) != HPE_OK); if (unlikely(__pyx_t_1)) { - /* "aiohttp/_http_parser.pyx":513 + /* "aiohttp/_http_parser.pyx":519 * "Not enough data for satisfy content length header.") * elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: * desc = cparser.llhttp_get_error_reason(self._cparser) # <<<<<<<<<<<<<< * raise PayloadEncodingError(desc.decode('latin-1')) * else: */ - __pyx_t_2 = __Pyx_PyBytes_FromString(llhttp_get_error_reason(__pyx_v_self->_cparser)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 513, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyBytes_FromString(llhttp_get_error_reason(__pyx_v_self->_cparser)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 519, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_desc = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; - /* "aiohttp/_http_parser.pyx":514 + /* "aiohttp/_http_parser.pyx":520 * elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: * desc = cparser.llhttp_get_error_reason(self._cparser) * raise PayloadEncodingError(desc.decode('latin-1')) # <<<<<<<<<<<<<< * else: * self._payload.feed_eof() */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_PayloadEncodingError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 514, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_PayloadEncodingError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 520, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 514, __pyx_L1_error) + __pyx_t_4 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, 
PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 520, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_6 = NULL; __pyx_t_5 = 0; @@ -12307,15 +12474,15 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 514, __pyx_L1_error) + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 520, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(0, 514, __pyx_L1_error) + __PYX_ERR(0, 520, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":512 + /* "aiohttp/_http_parser.pyx":518 * raise ContentLengthError( * "Not enough data for satisfy content length header.") * elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: # <<<<<<<<<<<<<< @@ -12324,7 +12491,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct */ } - /* "aiohttp/_http_parser.pyx":516 + /* "aiohttp/_http_parser.pyx":522 * raise PayloadEncodingError(desc.decode('latin-1')) * else: * self._payload.feed_eof() # <<<<<<<<<<<<<< @@ -12332,7 +12499,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct * self._on_headers_complete() */ /*else*/ { - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 516, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 522, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; @@ -12352,14 +12519,14 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct PyObject *__pyx_callargs[2] = {__pyx_t_4, NULL}; __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 0+__pyx_t_5); __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 516, __pyx_L1_error) + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 522, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } - /* "aiohttp/_http_parser.pyx":505 + /* "aiohttp/_http_parser.pyx":511 * cdef bytes desc * * if self._payload is not None: # <<<<<<<<<<<<<< @@ -12369,7 +12536,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct goto __pyx_L3; } - /* "aiohttp/_http_parser.pyx":517 + /* "aiohttp/_http_parser.pyx":523 * else: * self._payload.feed_eof() * elif self._started: # <<<<<<<<<<<<<< @@ -12378,18 +12545,18 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct */ if (__pyx_v_self->_started) { - /* "aiohttp/_http_parser.pyx":518 + /* "aiohttp/_http_parser.pyx":524 * self._payload.feed_eof() * elif self._started: * self._on_headers_complete() # <<<<<<<<<<<<<< * if self._messages: * return self._messages[-1][0] */ - __pyx_t_2 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_on_headers_complete(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 518, __pyx_L1_error) + __pyx_t_2 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_on_headers_complete(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* 
"aiohttp/_http_parser.pyx":519 + /* "aiohttp/_http_parser.pyx":525 * elif self._started: * self._on_headers_complete() * if self._messages: # <<<<<<<<<<<<<< @@ -12399,7 +12566,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_t_1 = (__pyx_v_self->_messages != Py_None)&&(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); if (__pyx_t_1) { - /* "aiohttp/_http_parser.pyx":520 + /* "aiohttp/_http_parser.pyx":526 * self._on_headers_complete() * if self._messages: * return self._messages[-1][0] # <<<<<<<<<<<<<< @@ -12409,18 +12576,18 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_self->_messages == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 520, __pyx_L1_error) + __PYX_ERR(0, 526, __pyx_L1_error) } - __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_self->_messages, -1L, long, 1, __Pyx_PyInt_From_long, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 520, __pyx_L1_error) + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_self->_messages, -1L, long, 1, __Pyx_PyInt_From_long, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_2, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 520, __pyx_L1_error) + __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_2, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":519 + /* "aiohttp/_http_parser.pyx":525 * elif self._started: * self._on_headers_complete() * if self._messages: # <<<<<<<<<<<<<< @@ -12429,7 +12596,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct */ } - /* "aiohttp/_http_parser.pyx":517 + /* "aiohttp/_http_parser.pyx":523 * else: * self._payload.feed_eof() * elif self._started: # <<<<<<<<<<<<<< @@ -12439,7 +12606,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct } __pyx_L3:; - /* "aiohttp/_http_parser.pyx":502 + /* "aiohttp/_http_parser.pyx":508 * ### Public API ### * * def feed_eof(self): # <<<<<<<<<<<<<< @@ -12464,7 +12631,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct return __pyx_r; } -/* "aiohttp/_http_parser.pyx":522 +/* "aiohttp/_http_parser.pyx":528 * return self._messages[-1][0] * * def feed_data(self, data): # <<<<<<<<<<<<<< @@ -12525,12 +12692,12 @@ PyObject *__pyx_args, PyObject *__pyx_kwds (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 522, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 528, __pyx_L3_error) else goto __pyx_L5_argtuple_error; } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "feed_data") < 0)) __PYX_ERR(0, 522, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "feed_data") < 0)) __PYX_ERR(0, 528, __pyx_L3_error) } } else if (unlikely(__pyx_nargs != 1)) { goto __pyx_L5_argtuple_error; @@ -12541,7 +12708,7 @@ PyObject *__pyx_args, PyObject *__pyx_kwds } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("feed_data", 1, 1, 1, 
__pyx_nargs); __PYX_ERR(0, 522, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("feed_data", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 528, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -12592,16 +12759,16 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __Pyx_RefNannySetupContext("feed_data", 0); __Pyx_INCREF(__pyx_v_data); - /* "aiohttp/_http_parser.pyx":528 + /* "aiohttp/_http_parser.pyx":534 * cdef cparser.llhttp_errno_t errno * * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< * data_len = self.py_buf.len * */ - __pyx_t_1 = PyObject_GetBuffer(__pyx_v_data, (&__pyx_v_self->py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 528, __pyx_L1_error) + __pyx_t_1 = PyObject_GetBuffer(__pyx_v_data, (&__pyx_v_self->py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 534, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":529 + /* "aiohttp/_http_parser.pyx":535 * * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) * data_len = self.py_buf.len # <<<<<<<<<<<<<< @@ -12610,7 +12777,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ __pyx_v_data_len = ((size_t)__pyx_v_self->py_buf.len); - /* "aiohttp/_http_parser.pyx":531 + /* "aiohttp/_http_parser.pyx":537 * data_len = self.py_buf.len * * errno = cparser.llhttp_execute( # <<<<<<<<<<<<<< @@ -12619,7 +12786,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ __pyx_v_errno = llhttp_execute(__pyx_v_self->_cparser, ((char *)__pyx_v_self->py_buf.buf), __pyx_v_data_len); - /* "aiohttp/_http_parser.pyx":536 + /* "aiohttp/_http_parser.pyx":542 * data_len) * * if errno is cparser.HPE_PAUSED_UPGRADE: # <<<<<<<<<<<<<< @@ -12629,7 +12796,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_t_2 = (__pyx_v_errno == HPE_PAUSED_UPGRADE); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":537 + /* "aiohttp/_http_parser.pyx":543 * * if errno is cparser.HPE_PAUSED_UPGRADE: * cparser.llhttp_resume_after_upgrade(self._cparser) # <<<<<<<<<<<<<< @@ -12638,7 +12805,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ llhttp_resume_after_upgrade(__pyx_v_self->_cparser); - /* "aiohttp/_http_parser.pyx":539 + /* "aiohttp/_http_parser.pyx":545 * cparser.llhttp_resume_after_upgrade(self._cparser) * * nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf # <<<<<<<<<<<<<< @@ -12647,7 +12814,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ __pyx_v_nb = (llhttp_get_error_pos(__pyx_v_self->_cparser) - ((char *)__pyx_v_self->py_buf.buf)); - /* "aiohttp/_http_parser.pyx":536 + /* "aiohttp/_http_parser.pyx":542 * data_len) * * if errno is cparser.HPE_PAUSED_UPGRADE: # <<<<<<<<<<<<<< @@ -12656,7 +12823,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ } - /* "aiohttp/_http_parser.pyx":541 + /* "aiohttp/_http_parser.pyx":547 * nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf * * PyBuffer_Release(&self.py_buf) # <<<<<<<<<<<<<< @@ -12665,7 +12832,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ PyBuffer_Release((&__pyx_v_self->py_buf)); - /* "aiohttp/_http_parser.pyx":543 + /* "aiohttp/_http_parser.pyx":549 * PyBuffer_Release(&self.py_buf) * * if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): # <<<<<<<<<<<<<< @@ -12684,7 +12851,7 @@ static PyObject 
*__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_t_3 = __pyx_t_2; if (__pyx_t_3) { - /* "aiohttp/_http_parser.pyx":544 + /* "aiohttp/_http_parser.pyx":550 * * if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): * if self._payload_error == 0: # <<<<<<<<<<<<<< @@ -12694,7 +12861,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_t_3 = (__pyx_v_self->_payload_error == 0); if (__pyx_t_3) { - /* "aiohttp/_http_parser.pyx":545 + /* "aiohttp/_http_parser.pyx":551 * if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): * if self._payload_error == 0: * if self._last_error is not None: # <<<<<<<<<<<<<< @@ -12704,7 +12871,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_t_3 = (__pyx_v_self->_last_error != Py_None); if (__pyx_t_3) { - /* "aiohttp/_http_parser.pyx":546 + /* "aiohttp/_http_parser.pyx":552 * if self._payload_error == 0: * if self._last_error is not None: * ex = self._last_error # <<<<<<<<<<<<<< @@ -12716,7 +12883,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_v_ex = __pyx_t_4; __pyx_t_4 = 0; - /* "aiohttp/_http_parser.pyx":547 + /* "aiohttp/_http_parser.pyx":553 * if self._last_error is not None: * ex = self._last_error * self._last_error = None # <<<<<<<<<<<<<< @@ -12729,7 +12896,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __Pyx_DECREF(__pyx_v_self->_last_error); __pyx_v_self->_last_error = Py_None; - /* "aiohttp/_http_parser.pyx":545 + /* "aiohttp/_http_parser.pyx":551 * if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): * if self._payload_error == 0: * if self._last_error is not None: # <<<<<<<<<<<<<< @@ -12739,7 +12906,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct goto __pyx_L6; } - /* "aiohttp/_http_parser.pyx":549 + /* "aiohttp/_http_parser.pyx":555 * self._last_error = None * else: * after = cparser.llhttp_get_error_pos(self._cparser) # <<<<<<<<<<<<<< @@ -12749,103 +12916,103 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct /*else*/ { __pyx_v_after = llhttp_get_error_pos(__pyx_v_self->_cparser); - /* "aiohttp/_http_parser.pyx":550 + /* "aiohttp/_http_parser.pyx":556 * else: * after = cparser.llhttp_get_error_pos(self._cparser) * before = data[:after - self.py_buf.buf] # <<<<<<<<<<<<<< * after_b = after.split(b"\r\n", 1)[0] * before = before.rsplit(b"\r\n", 1)[-1] */ - __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, 0, (__pyx_v_after - ((char *)__pyx_v_self->py_buf.buf)), NULL, NULL, NULL, 0, 1, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 550, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, 0, (__pyx_v_after - ((char *)__pyx_v_self->py_buf.buf)), NULL, NULL, NULL, 0, 1, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 556, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_v_before = __pyx_t_4; __pyx_t_4 = 0; - /* "aiohttp/_http_parser.pyx":551 + /* "aiohttp/_http_parser.pyx":557 * after = cparser.llhttp_get_error_pos(self._cparser) * before = data[:after - self.py_buf.buf] * after_b = after.split(b"\r\n", 1)[0] # <<<<<<<<<<<<<< * before = before.rsplit(b"\r\n", 1)[-1] * data = before + after_b */ - __pyx_t_4 = __Pyx_PyBytes_FromString(__pyx_v_after); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 551, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyBytes_FromString(__pyx_v_after); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 557, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = 
__Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_split); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 551, __pyx_L1_error) + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_split); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 557, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 551, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 557, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_4, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 551, __pyx_L1_error) + __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_4, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 557, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_v_after_b = __pyx_t_5; __pyx_t_5 = 0; - /* "aiohttp/_http_parser.pyx":552 + /* "aiohttp/_http_parser.pyx":558 * before = data[:after - self.py_buf.buf] * after_b = after.split(b"\r\n", 1)[0] * before = before.rsplit(b"\r\n", 1)[-1] # <<<<<<<<<<<<<< * data = before + after_b * pointer = " " * (len(repr(before))-1) + "^" */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_before, __pyx_n_s_rsplit); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 552, __pyx_L1_error) + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_before, __pyx_n_s_rsplit); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 558, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 552, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 558, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_4, -1L, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 552, __pyx_L1_error) + __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_4, -1L, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 558, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF_SET(__pyx_v_before, __pyx_t_5); __pyx_t_5 = 0; - /* "aiohttp/_http_parser.pyx":553 + /* "aiohttp/_http_parser.pyx":559 * after_b = after.split(b"\r\n", 1)[0] * before = before.rsplit(b"\r\n", 1)[-1] * data = before + after_b # <<<<<<<<<<<<<< * pointer = " " * (len(repr(before))-1) + "^" * ex = parser_error_from_errno(self._cparser, data, pointer) */ - __pyx_t_5 = PyNumber_Add(__pyx_v_before, __pyx_v_after_b); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 553, __pyx_L1_error) + __pyx_t_5 = PyNumber_Add(__pyx_v_before, __pyx_v_after_b); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 559, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_5); __pyx_t_5 = 0; - /* "aiohttp/_http_parser.pyx":554 + /* "aiohttp/_http_parser.pyx":560 * before = before.rsplit(b"\r\n", 1)[-1] * data = before + after_b * pointer = " " * (len(repr(before))-1) + "^" # <<<<<<<<<<<<<< * ex = parser_error_from_errno(self._cparser, data, pointer) * self._payload = None */ - __pyx_t_5 = PyObject_Repr(__pyx_v_before); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 554, __pyx_L1_error) + __pyx_t_5 = PyObject_Repr(__pyx_v_before); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 560, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = PyObject_Length(__pyx_t_5); 
if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 554, __pyx_L1_error) + __pyx_t_6 = PyObject_Length(__pyx_t_5); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 560, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_5 = __Pyx_PySequence_Multiply(__pyx_kp_u__6, (__pyx_t_6 - 1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 554, __pyx_L1_error) + __pyx_t_5 = __Pyx_PySequence_Multiply(__pyx_kp_u__8, (__pyx_t_6 - 1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 560, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_5, __pyx_kp_u__7); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 554, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_5, __pyx_kp_u__9); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 560, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_v_pointer = ((PyObject*)__pyx_t_4); __pyx_t_4 = 0; - /* "aiohttp/_http_parser.pyx":555 + /* "aiohttp/_http_parser.pyx":561 * data = before + after_b * pointer = " " * (len(repr(before))-1) + "^" * ex = parser_error_from_errno(self._cparser, data, pointer) # <<<<<<<<<<<<<< * self._payload = None * raise ex */ - __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(__pyx_v_self->_cparser, __pyx_v_data, __pyx_v_pointer); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 555, __pyx_L1_error) + __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(__pyx_v_self->_cparser, __pyx_v_data, __pyx_v_pointer); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 561, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_v_ex = __pyx_t_4; __pyx_t_4 = 0; } __pyx_L6:; - /* "aiohttp/_http_parser.pyx":556 + /* "aiohttp/_http_parser.pyx":562 * pointer = " " * (len(repr(before))-1) + "^" * ex = parser_error_from_errno(self._cparser, data, pointer) * self._payload = None # <<<<<<<<<<<<<< @@ -12858,7 +13025,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = Py_None; - /* "aiohttp/_http_parser.pyx":557 + /* "aiohttp/_http_parser.pyx":563 * ex = parser_error_from_errno(self._cparser, data, pointer) * self._payload = None * raise ex # <<<<<<<<<<<<<< @@ -12866,9 +13033,9 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct * if self._messages: */ __Pyx_Raise(__pyx_v_ex, 0, 0, 0); - __PYX_ERR(0, 557, __pyx_L1_error) + __PYX_ERR(0, 563, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":544 + /* "aiohttp/_http_parser.pyx":550 * * if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): * if self._payload_error == 0: # <<<<<<<<<<<<<< @@ -12877,7 +13044,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ } - /* "aiohttp/_http_parser.pyx":543 + /* "aiohttp/_http_parser.pyx":549 * PyBuffer_Release(&self.py_buf) * * if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): # <<<<<<<<<<<<<< @@ -12886,7 +13053,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ } - /* "aiohttp/_http_parser.pyx":559 + /* "aiohttp/_http_parser.pyx":565 * raise ex * * if self._messages: # <<<<<<<<<<<<<< @@ -12896,7 +13063,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_t_3 = (__pyx_v_self->_messages != Py_None)&&(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); if (__pyx_t_3) { - /* "aiohttp/_http_parser.pyx":560 + /* "aiohttp/_http_parser.pyx":566 * * if self._messages: * messages = self._messages # <<<<<<<<<<<<<< @@ -12908,14 +13075,14 @@ static 
PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_v_messages = __pyx_t_4; __pyx_t_4 = 0; - /* "aiohttp/_http_parser.pyx":561 + /* "aiohttp/_http_parser.pyx":567 * if self._messages: * messages = self._messages * self._messages = [] # <<<<<<<<<<<<<< * else: * messages = () */ - __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 561, __pyx_L1_error) + __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 567, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_4); __Pyx_GOTREF(__pyx_v_self->_messages); @@ -12923,7 +13090,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_v_self->_messages = ((PyObject*)__pyx_t_4); __pyx_t_4 = 0; - /* "aiohttp/_http_parser.pyx":559 + /* "aiohttp/_http_parser.pyx":565 * raise ex * * if self._messages: # <<<<<<<<<<<<<< @@ -12933,7 +13100,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct goto __pyx_L7; } - /* "aiohttp/_http_parser.pyx":563 + /* "aiohttp/_http_parser.pyx":569 * self._messages = [] * else: * messages = () # <<<<<<<<<<<<<< @@ -12946,7 +13113,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct } __pyx_L7:; - /* "aiohttp/_http_parser.pyx":565 + /* "aiohttp/_http_parser.pyx":571 * messages = () * * if self._upgraded: # <<<<<<<<<<<<<< @@ -12955,32 +13122,32 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ if (__pyx_v_self->_upgraded) { - /* "aiohttp/_http_parser.pyx":566 + /* "aiohttp/_http_parser.pyx":572 * * if self._upgraded: * return messages, True, data[nb:] # <<<<<<<<<<<<<< * else: - * return messages, False, b'' + * return messages, False, b"" */ __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, __pyx_v_nb, 0, NULL, NULL, NULL, 1, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 566, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, __pyx_v_nb, 0, NULL, NULL, NULL, 1, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 572, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 566, __pyx_L1_error) + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 572, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_INCREF(__pyx_v_messages); __Pyx_GIVEREF(__pyx_v_messages); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages)) __PYX_ERR(0, 566, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages)) __PYX_ERR(0, 572, __pyx_L1_error); __Pyx_INCREF(Py_True); __Pyx_GIVEREF(Py_True); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 1, Py_True)) __PYX_ERR(0, 566, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 1, Py_True)) __PYX_ERR(0, 572, __pyx_L1_error); __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4)) __PYX_ERR(0, 566, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4)) __PYX_ERR(0, 572, __pyx_L1_error); __pyx_t_4 = 0; __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":565 + /* "aiohttp/_http_parser.pyx":571 * messages = () * * if self._upgraded: # <<<<<<<<<<<<<< @@ -12989,32 +13156,32 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct */ } - /* "aiohttp/_http_parser.pyx":568 + /* "aiohttp/_http_parser.pyx":574 * return messages, True, data[nb:] * else: - * return messages, False, b'' # <<<<<<<<<<<<<< + * return messages, False, b"" # <<<<<<<<<<<<<< * * def set_upgraded(self, val): */ /*else*/ { 
__Pyx_XDECREF(__pyx_r); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 568, __pyx_L1_error) + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 574, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_INCREF(__pyx_v_messages); __Pyx_GIVEREF(__pyx_v_messages); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages)) __PYX_ERR(0, 568, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages)) __PYX_ERR(0, 574, __pyx_L1_error); __Pyx_INCREF(Py_False); __Pyx_GIVEREF(Py_False); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 1, Py_False)) __PYX_ERR(0, 568, __pyx_L1_error); - __Pyx_INCREF(__pyx_kp_b__8); - __Pyx_GIVEREF(__pyx_kp_b__8); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_b__8)) __PYX_ERR(0, 568, __pyx_L1_error); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 1, Py_False)) __PYX_ERR(0, 574, __pyx_L1_error); + __Pyx_INCREF(__pyx_kp_b__4); + __Pyx_GIVEREF(__pyx_kp_b__4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_b__4)) __PYX_ERR(0, 574, __pyx_L1_error); __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L0; } - /* "aiohttp/_http_parser.pyx":522 + /* "aiohttp/_http_parser.pyx":528 * return self._messages[-1][0] * * def feed_data(self, data): # <<<<<<<<<<<<<< @@ -13040,8 +13207,8 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct return __pyx_r; } -/* "aiohttp/_http_parser.pyx":570 - * return messages, False, b'' +/* "aiohttp/_http_parser.pyx":576 + * return messages, False, b"" * * def set_upgraded(self, val): # <<<<<<<<<<<<<< * self._upgraded = val @@ -13101,12 +13268,12 @@ PyObject *__pyx_args, PyObject *__pyx_kwds (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 570, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) else goto __pyx_L5_argtuple_error; } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "set_upgraded") < 0)) __PYX_ERR(0, 570, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "set_upgraded") < 0)) __PYX_ERR(0, 576, __pyx_L3_error) } } else if (unlikely(__pyx_nargs != 1)) { goto __pyx_L5_argtuple_error; @@ -13117,7 +13284,7 @@ PyObject *__pyx_args, PyObject *__pyx_kwds } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("set_upgraded", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 570, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("set_upgraded", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 576, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -13153,18 +13320,18 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_8set_upgraded(str int __pyx_clineno = 0; __Pyx_RefNannySetupContext("set_upgraded", 1); - /* "aiohttp/_http_parser.pyx":571 + /* "aiohttp/_http_parser.pyx":577 * * def set_upgraded(self, val): * self._upgraded = val # <<<<<<<<<<<<<< * * */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_val); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 571, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_val); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 577, __pyx_L1_error) __pyx_v_self->_upgraded = __pyx_t_1; - /* "aiohttp/_http_parser.pyx":570 - * return messages, False, b'' + /* "aiohttp/_http_parser.pyx":576 + * return messages, False, b"" * * def 
set_upgraded(self, val): # <<<<<<<<<<<<<< * self._upgraded = val @@ -13397,7 +13564,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_12__setstate_cyth return __pyx_r; } -/* "aiohttp/_http_parser.pyx":576 +/* "aiohttp/_http_parser.pyx":582 * cdef class HttpRequestParser(HttpParser): * * def __init__( # <<<<<<<<<<<<<< @@ -13437,7 +13604,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje { PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_limit,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,&__pyx_n_s_auto_decompress,0}; - /* "aiohttp/_http_parser.pyx":577 + /* "aiohttp/_http_parser.pyx":583 * * def __init__( * self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< @@ -13446,7 +13613,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje */ values[3] = __Pyx_Arg_NewRef_VARARGS(((PyObject *)Py_None)); - /* "aiohttp/_http_parser.pyx":579 + /* "aiohttp/_http_parser.pyx":585 * self, protocol, loop, int limit, timer=None, * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< @@ -13489,7 +13656,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: @@ -13497,9 +13664,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[1]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 1); __PYX_ERR(0, 576, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 1); __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: @@ -13507,70 +13674,70 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje (void)__Pyx_Arg_NewRef_VARARGS(values[2]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 2); __PYX_ERR(0, 576, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 2); __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_timer); if (value) { values[3] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_max_line_size); if (value) { values[4] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 5: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_max_headers); 
if (value) { values[5] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 6: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_max_field_size); if (value) { values[6] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 7: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_payload_exception); if (value) { values[7] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 8: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_response_with_body); if (value) { values[8] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 9: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_read_until_eof); if (value) { values[9] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 10: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_auto_decompress); if (value) { values[10] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 576, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 582, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 576, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 582, __pyx_L3_error) } } else { switch (__pyx_nargs) { @@ -13599,29 +13766,29 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje } __pyx_v_protocol = values[0]; __pyx_v_loop = values[1]; - __pyx_v_limit = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_limit == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 577, __pyx_L3_error) + __pyx_v_limit = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_limit == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 583, __pyx_L3_error) __pyx_v_timer = values[3]; if (values[4]) { - __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 578, __pyx_L3_error) + __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 584, __pyx_L3_error) } else { __pyx_v_max_line_size = ((size_t)0x1FFE); } if (values[5]) { - __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 578, 
__pyx_L3_error) + __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 584, __pyx_L3_error) } else { __pyx_v_max_headers = ((size_t)0x8000); } if (values[6]) { - __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[6]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 579, __pyx_L3_error) + __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[6]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 585, __pyx_L3_error) } else { __pyx_v_max_field_size = ((size_t)0x1FFE); } __pyx_v_payload_exception = values[7]; if (values[8]) { - __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 580, __pyx_L3_error) + __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 586, __pyx_L3_error) } else { - /* "aiohttp/_http_parser.pyx":580 + /* "aiohttp/_http_parser.pyx":586 * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< @@ -13631,15 +13798,15 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje __pyx_v_response_with_body = ((int)1); } if (values[9]) { - __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 580, __pyx_L3_error) + __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 586, __pyx_L3_error) } else { __pyx_v_read_until_eof = ((int)0); } if (values[10]) { - __pyx_v_auto_decompress = __Pyx_PyObject_IsTrue(values[10]); if (unlikely((__pyx_v_auto_decompress == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 581, __pyx_L3_error) + __pyx_v_auto_decompress = __Pyx_PyObject_IsTrue(values[10]); if (unlikely((__pyx_v_auto_decompress == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 587, __pyx_L3_error) } else { - /* "aiohttp/_http_parser.pyx":581 + /* "aiohttp/_http_parser.pyx":587 * size_t max_field_size=8190, payload_exception=None, * bint response_with_body=True, bint read_until_eof=False, * bint auto_decompress=True, # <<<<<<<<<<<<<< @@ -13651,7 +13818,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, __pyx_nargs); __PYX_ERR(0, 576, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, __pyx_nargs); __PYX_ERR(0, 582, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -13667,7 +13834,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObje __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof, __pyx_v_auto_decompress); - /* "aiohttp/_http_parser.pyx":576 + /* "aiohttp/_http_parser.pyx":582 * cdef class HttpRequestParser(HttpParser): * * def __init__( # <<<<<<<<<<<<<< @@ 
-13696,7 +13863,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(struct int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__init__", 1); - /* "aiohttp/_http_parser.pyx":583 + /* "aiohttp/_http_parser.pyx":589 * bint auto_decompress=True, * ): * self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, # <<<<<<<<<<<<<< @@ -13712,11 +13879,11 @@ static int __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(struct __pyx_t_2.response_with_body = __pyx_v_response_with_body; __pyx_t_2.read_until_eof = __pyx_v_read_until_eof; __pyx_t_2.auto_decompress = __pyx_v_auto_decompress; - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_REQUEST, __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 583, __pyx_L1_error) + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_REQUEST, __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 589, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":576 + /* "aiohttp/_http_parser.pyx":582 * cdef class HttpRequestParser(HttpParser): * * def __init__( # <<<<<<<<<<<<<< @@ -13736,7 +13903,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(struct return __pyx_r; } -/* "aiohttp/_http_parser.pyx":588 +/* "aiohttp/_http_parser.pyx":594 * auto_decompress) * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< @@ -13776,7 +13943,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ int __pyx_clineno = 0; __Pyx_RefNannySetupContext("_on_status_complete", 1); - /* "aiohttp/_http_parser.pyx":590 + /* "aiohttp/_http_parser.pyx":596 * cdef object _on_status_complete(self): * cdef int idx1, idx2 * if not self._buf: # <<<<<<<<<<<<<< @@ -13787,7 +13954,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_t_2 = (!__pyx_t_1); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":591 + /* "aiohttp/_http_parser.pyx":597 * cdef int idx1, idx2 * if not self._buf: * return # <<<<<<<<<<<<<< @@ -13798,7 +13965,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":590 + /* "aiohttp/_http_parser.pyx":596 * cdef object _on_status_complete(self): * cdef int idx1, idx2 * if not self._buf: # <<<<<<<<<<<<<< @@ -13807,7 +13974,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ } - /* "aiohttp/_http_parser.pyx":592 + /* "aiohttp/_http_parser.pyx":598 * if not self._buf: * return * self._path = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< @@ -13816,9 +13983,9 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); - __PYX_ERR(0, 592, __pyx_L1_error) + __PYX_ERR(0, 598, __pyx_L1_error) } - __pyx_t_3 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 592, __pyx_L1_error) + __pyx_t_3 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 598, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->__pyx_base._path); @@ -13826,7 +13993,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_v_self->__pyx_base._path = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":593 + /* "aiohttp/_http_parser.pyx":599 * return * self._path = self._buf.decode('utf-8', 'surrogateescape') * try: # <<<<<<<<<<<<<< @@ -13835,7 +14002,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ /*try:*/ { - /* "aiohttp/_http_parser.pyx":594 + /* "aiohttp/_http_parser.pyx":600 * self._path = self._buf.decode('utf-8', 'surrogateescape') * try: * idx3 = len(self._path) # <<<<<<<<<<<<<< @@ -13846,13 +14013,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __Pyx_INCREF(__pyx_t_3); if (unlikely(__pyx_t_3 == Py_None)) { PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 594, __pyx_L5_error) + __PYX_ERR(0, 600, __pyx_L5_error) } - __pyx_t_4 = __Pyx_PyUnicode_GET_LENGTH(__pyx_t_3); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 594, __pyx_L5_error) + __pyx_t_4 = __Pyx_PyUnicode_GET_LENGTH(__pyx_t_3); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 600, __pyx_L5_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_idx3 = __pyx_t_4; - /* "aiohttp/_http_parser.pyx":595 + /* "aiohttp/_http_parser.pyx":601 * try: * idx3 = len(self._path) * if self._cparser.method == cparser.HTTP_CONNECT: # <<<<<<<<<<<<<< @@ -13862,20 +14029,20 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_t_2 = (__pyx_v_self->__pyx_base._cparser->method == HTTP_CONNECT); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":598 + /* "aiohttp/_http_parser.pyx":604 * # authority-form, * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 * self._url = URL.build(authority=self._path, encoded=True) # <<<<<<<<<<<<<< * elif idx3 > 1 and self._path[0] == '/': * # origin-form, */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 598, __pyx_L5_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 604, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 598, __pyx_L5_error) + __pyx_t_5 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 604, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_5); - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_authority, __pyx_v_self->__pyx_base._path) < 0) __PYX_ERR(0, 598, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 598, __pyx_L5_error) - __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_empty_tuple, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 598, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_authority, __pyx_v_self->__pyx_base._path) < 0) __PYX_ERR(0, 604, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 604, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_empty_tuple, __pyx_t_5); if 
(unlikely(!__pyx_t_6)) __PYX_ERR(0, 604, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; @@ -13885,7 +14052,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_v_self->__pyx_base._url = __pyx_t_6; __pyx_t_6 = 0; - /* "aiohttp/_http_parser.pyx":595 + /* "aiohttp/_http_parser.pyx":601 * try: * idx3 = len(self._path) * if self._cparser.method == cparser.HTTP_CONNECT: # <<<<<<<<<<<<<< @@ -13895,7 +14062,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ goto __pyx_L7; } - /* "aiohttp/_http_parser.pyx":599 + /* "aiohttp/_http_parser.pyx":605 * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 * self._url = URL.build(authority=self._path, encoded=True) * elif idx3 > 1 and self._path[0] == '/': # <<<<<<<<<<<<<< @@ -13908,13 +14075,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_t_2 = __pyx_t_1; goto __pyx_L8_bool_binop_done; } - __pyx_t_7 = __Pyx_GetItemInt_Unicode(__pyx_v_self->__pyx_base._path, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_7 == (Py_UCS4)-1)) __PYX_ERR(0, 599, __pyx_L5_error) + __pyx_t_7 = __Pyx_GetItemInt_Unicode(__pyx_v_self->__pyx_base._path, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_7 == (Py_UCS4)-1)) __PYX_ERR(0, 605, __pyx_L5_error) __pyx_t_1 = (__pyx_t_7 == 47); __pyx_t_2 = __pyx_t_1; __pyx_L8_bool_binop_done:; if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":602 + /* "aiohttp/_http_parser.pyx":608 * # origin-form, * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 * idx1 = self._path.find("?") # <<<<<<<<<<<<<< @@ -13923,12 +14090,12 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "find"); - __PYX_ERR(0, 602, __pyx_L5_error) + __PYX_ERR(0, 608, __pyx_L5_error) } - __pyx_t_4 = PyUnicode_Find(__pyx_v_self->__pyx_base._path, __pyx_kp_u__9, 0, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-2))) __PYX_ERR(0, 602, __pyx_L5_error) + __pyx_t_4 = PyUnicode_Find(__pyx_v_self->__pyx_base._path, __pyx_kp_u__10, 0, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-2))) __PYX_ERR(0, 608, __pyx_L5_error) __pyx_v_idx1 = __pyx_t_4; - /* "aiohttp/_http_parser.pyx":603 + /* "aiohttp/_http_parser.pyx":609 * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 * idx1 = self._path.find("?") * if idx1 == -1: # <<<<<<<<<<<<<< @@ -13938,17 +14105,17 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_t_2 = (__pyx_v_idx1 == -1L); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":604 + /* "aiohttp/_http_parser.pyx":610 * idx1 = self._path.find("?") * if idx1 == -1: * query = "" # <<<<<<<<<<<<<< * idx2 = self._path.find("#") * if idx2 == -1: */ - __Pyx_INCREF(__pyx_kp_u__8); - __pyx_v_query = __pyx_kp_u__8; + __Pyx_INCREF(__pyx_kp_u__4); + __pyx_v_query = __pyx_kp_u__4; - /* "aiohttp/_http_parser.pyx":605 + /* "aiohttp/_http_parser.pyx":611 * if idx1 == -1: * query = "" * idx2 = self._path.find("#") # <<<<<<<<<<<<<< @@ -13957,12 +14124,12 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "find"); - __PYX_ERR(0, 605, 
__pyx_L5_error) + __PYX_ERR(0, 611, __pyx_L5_error) } - __pyx_t_4 = PyUnicode_Find(__pyx_v_self->__pyx_base._path, __pyx_kp_u__10, 0, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-2))) __PYX_ERR(0, 605, __pyx_L5_error) + __pyx_t_4 = PyUnicode_Find(__pyx_v_self->__pyx_base._path, __pyx_kp_u__11, 0, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-2))) __PYX_ERR(0, 611, __pyx_L5_error) __pyx_v_idx2 = __pyx_t_4; - /* "aiohttp/_http_parser.pyx":606 + /* "aiohttp/_http_parser.pyx":612 * query = "" * idx2 = self._path.find("#") * if idx2 == -1: # <<<<<<<<<<<<<< @@ -13972,7 +14139,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_t_2 = (__pyx_v_idx2 == -1L); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":607 + /* "aiohttp/_http_parser.pyx":613 * idx2 = self._path.find("#") * if idx2 == -1: * path = self._path # <<<<<<<<<<<<<< @@ -13984,17 +14151,17 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_v_path = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; - /* "aiohttp/_http_parser.pyx":608 + /* "aiohttp/_http_parser.pyx":614 * if idx2 == -1: * path = self._path * fragment = "" # <<<<<<<<<<<<<< * else: * path = self._path[0: idx2] */ - __Pyx_INCREF(__pyx_kp_u__8); - __pyx_v_fragment = __pyx_kp_u__8; + __Pyx_INCREF(__pyx_kp_u__4); + __pyx_v_fragment = __pyx_kp_u__4; - /* "aiohttp/_http_parser.pyx":606 + /* "aiohttp/_http_parser.pyx":612 * query = "" * idx2 = self._path.find("#") * if idx2 == -1: # <<<<<<<<<<<<<< @@ -14004,7 +14171,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ goto __pyx_L11; } - /* "aiohttp/_http_parser.pyx":610 + /* "aiohttp/_http_parser.pyx":616 * fragment = "" * else: * path = self._path[0: idx2] # <<<<<<<<<<<<<< @@ -14014,14 +14181,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ /*else*/ { if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 610, __pyx_L5_error) + __PYX_ERR(0, 616, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, 0, __pyx_v_idx2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 610, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, 0, __pyx_v_idx2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 616, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __pyx_v_path = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; - /* "aiohttp/_http_parser.pyx":611 + /* "aiohttp/_http_parser.pyx":617 * else: * path = self._path[0: idx2] * fragment = self._path[idx2+1:] # <<<<<<<<<<<<<< @@ -14030,16 +14197,16 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 611, __pyx_L5_error) + __PYX_ERR(0, 617, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, (__pyx_v_idx2 + 1), PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 611, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, (__pyx_v_idx2 + 1), PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __pyx_v_fragment = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; } __pyx_L11:; - /* "aiohttp/_http_parser.pyx":603 + /* "aiohttp/_http_parser.pyx":609 * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 * idx1 = 
self._path.find("?") * if idx1 == -1: # <<<<<<<<<<<<<< @@ -14049,7 +14216,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ goto __pyx_L10; } - /* "aiohttp/_http_parser.pyx":614 + /* "aiohttp/_http_parser.pyx":620 * * else: * path = self._path[0:idx1] # <<<<<<<<<<<<<< @@ -14059,14 +14226,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ /*else*/ { if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 614, __pyx_L5_error) + __PYX_ERR(0, 620, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, 0, __pyx_v_idx1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 614, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, 0, __pyx_v_idx1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 620, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __pyx_v_path = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; - /* "aiohttp/_http_parser.pyx":615 + /* "aiohttp/_http_parser.pyx":621 * else: * path = self._path[0:idx1] * idx1 += 1 # <<<<<<<<<<<<<< @@ -14075,7 +14242,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ __pyx_v_idx1 = (__pyx_v_idx1 + 1); - /* "aiohttp/_http_parser.pyx":616 + /* "aiohttp/_http_parser.pyx":622 * path = self._path[0:idx1] * idx1 += 1 * idx2 = self._path.find("#", idx1+1) # <<<<<<<<<<<<<< @@ -14084,16 +14251,16 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "find"); - __PYX_ERR(0, 616, __pyx_L5_error) + __PYX_ERR(0, 622, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyInt_From_long((__pyx_v_idx1 + 1)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 616, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyInt_From_long((__pyx_v_idx1 + 1)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 622, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); - __pyx_t_4 = (__Pyx_Py_IsNone(__pyx_t_6) ? (0) : (__Pyx_PyIndex_AsSsize_t(__pyx_t_6))); if (unlikely((__pyx_t_4 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 616, __pyx_L5_error) + __pyx_t_4 = (__Pyx_Py_IsNone(__pyx_t_6) ? 
(0) : (__Pyx_PyIndex_AsSsize_t(__pyx_t_6))); if (unlikely((__pyx_t_4 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 622, __pyx_L5_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_8 = PyUnicode_Find(__pyx_v_self->__pyx_base._path, __pyx_kp_u__10, __pyx_t_4, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-2))) __PYX_ERR(0, 616, __pyx_L5_error) + __pyx_t_8 = PyUnicode_Find(__pyx_v_self->__pyx_base._path, __pyx_kp_u__11, __pyx_t_4, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-2))) __PYX_ERR(0, 622, __pyx_L5_error) __pyx_v_idx2 = __pyx_t_8; - /* "aiohttp/_http_parser.pyx":617 + /* "aiohttp/_http_parser.pyx":623 * idx1 += 1 * idx2 = self._path.find("#", idx1+1) * if idx2 == -1: # <<<<<<<<<<<<<< @@ -14103,7 +14270,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_t_2 = (__pyx_v_idx2 == -1L); if (__pyx_t_2) { - /* "aiohttp/_http_parser.pyx":618 + /* "aiohttp/_http_parser.pyx":624 * idx2 = self._path.find("#", idx1+1) * if idx2 == -1: * query = self._path[idx1:] # <<<<<<<<<<<<<< @@ -14112,24 +14279,24 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 618, __pyx_L5_error) + __PYX_ERR(0, 624, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, __pyx_v_idx1, PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 618, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, __pyx_v_idx1, PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 624, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __pyx_v_query = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; - /* "aiohttp/_http_parser.pyx":619 + /* "aiohttp/_http_parser.pyx":625 * if idx2 == -1: * query = self._path[idx1:] * fragment = "" # <<<<<<<<<<<<<< * else: * query = self._path[idx1: idx2] */ - __Pyx_INCREF(__pyx_kp_u__8); - __pyx_v_fragment = __pyx_kp_u__8; + __Pyx_INCREF(__pyx_kp_u__4); + __pyx_v_fragment = __pyx_kp_u__4; - /* "aiohttp/_http_parser.pyx":617 + /* "aiohttp/_http_parser.pyx":623 * idx1 += 1 * idx2 = self._path.find("#", idx1+1) * if idx2 == -1: # <<<<<<<<<<<<<< @@ -14139,7 +14306,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ goto __pyx_L12; } - /* "aiohttp/_http_parser.pyx":621 + /* "aiohttp/_http_parser.pyx":627 * fragment = "" * else: * query = self._path[idx1: idx2] # <<<<<<<<<<<<<< @@ -14149,14 +14316,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ /*else*/ { if (unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 621, __pyx_L5_error) + __PYX_ERR(0, 627, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, __pyx_v_idx1, __pyx_v_idx2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 621, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, __pyx_v_idx1, __pyx_v_idx2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 627, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __pyx_v_query = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; - /* "aiohttp/_http_parser.pyx":622 + /* "aiohttp/_http_parser.pyx":628 * else: * query = self._path[idx1: idx2] * fragment = self._path[idx2+1:] # <<<<<<<<<<<<<< @@ -14165,9 +14332,9 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ */ if 
(unlikely(__pyx_v_self->__pyx_base._path == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 622, __pyx_L5_error) + __PYX_ERR(0, 628, __pyx_L5_error) } - __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, (__pyx_v_idx2 + 1), PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 622, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyUnicode_Substring(__pyx_v_self->__pyx_base._path, (__pyx_v_idx2 + 1), PY_SSIZE_T_MAX); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 628, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __pyx_v_fragment = ((PyObject*)__pyx_t_6); __pyx_t_6 = 0; @@ -14176,62 +14343,62 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ } __pyx_L10:; - /* "aiohttp/_http_parser.pyx":624 + /* "aiohttp/_http_parser.pyx":630 * fragment = self._path[idx2+1:] * * self._url = URL.build( # <<<<<<<<<<<<<< * path=path, * query_string=query, */ - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 624, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 630, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); - /* "aiohttp/_http_parser.pyx":625 + /* "aiohttp/_http_parser.pyx":631 * * self._url = URL.build( * path=path, # <<<<<<<<<<<<<< * query_string=query, * fragment=fragment, */ - __pyx_t_5 = __Pyx_PyDict_NewPresized(4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 625, __pyx_L5_error) + __pyx_t_5 = __Pyx_PyDict_NewPresized(4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 631, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_5); - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_path, __pyx_v_path) < 0) __PYX_ERR(0, 625, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_path, __pyx_v_path) < 0) __PYX_ERR(0, 631, __pyx_L5_error) - /* "aiohttp/_http_parser.pyx":626 + /* "aiohttp/_http_parser.pyx":632 * self._url = URL.build( * path=path, * query_string=query, # <<<<<<<<<<<<<< * fragment=fragment, * encoded=True, */ - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_query_string, __pyx_v_query) < 0) __PYX_ERR(0, 625, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_query_string, __pyx_v_query) < 0) __PYX_ERR(0, 631, __pyx_L5_error) - /* "aiohttp/_http_parser.pyx":627 + /* "aiohttp/_http_parser.pyx":633 * path=path, * query_string=query, * fragment=fragment, # <<<<<<<<<<<<<< * encoded=True, * ) */ - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_fragment, __pyx_v_fragment) < 0) __PYX_ERR(0, 625, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_fragment, __pyx_v_fragment) < 0) __PYX_ERR(0, 631, __pyx_L5_error) - /* "aiohttp/_http_parser.pyx":628 + /* "aiohttp/_http_parser.pyx":634 * query_string=query, * fragment=fragment, * encoded=True, # <<<<<<<<<<<<<< * ) * else: */ - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 625, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 631, __pyx_L5_error) - /* "aiohttp/_http_parser.pyx":624 + /* "aiohttp/_http_parser.pyx":630 * fragment = self._path[idx2+1:] * * self._url = URL.build( # <<<<<<<<<<<<<< * path=path, * query_string=query, */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_empty_tuple, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 624, __pyx_L5_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_empty_tuple, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 630, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 
0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; @@ -14241,7 +14408,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_v_self->__pyx_base._url = __pyx_t_3; __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":599 + /* "aiohttp/_http_parser.pyx":605 * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 * self._url = URL.build(authority=self._path, encoded=True) * elif idx3 > 1 and self._path[0] == '/': # <<<<<<<<<<<<<< @@ -14251,7 +14418,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ goto __pyx_L7; } - /* "aiohttp/_http_parser.pyx":633 + /* "aiohttp/_http_parser.pyx":639 * # absolute-form for proxy maybe, * # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 * self._url = URL(self._path, encoded=True) # <<<<<<<<<<<<<< @@ -14259,15 +14426,15 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ * PyByteArray_Resize(self._buf, 0) */ /*else*/ { - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 633, __pyx_L5_error) + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 639, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_v_self->__pyx_base._path); __Pyx_GIVEREF(__pyx_v_self->__pyx_base._path); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->__pyx_base._path)) __PYX_ERR(0, 633, __pyx_L5_error); - __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 633, __pyx_L5_error) + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->__pyx_base._path)) __PYX_ERR(0, 639, __pyx_L5_error); + __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 639, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_5); - if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 633, __pyx_L5_error) - __pyx_t_6 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 633, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 639, __pyx_L5_error) + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 639, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; @@ -14280,7 +14447,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_L7:; } - /* "aiohttp/_http_parser.pyx":635 + /* "aiohttp/_http_parser.pyx":641 * self._url = URL(self._path, encoded=True) * finally: * PyByteArray_Resize(self._buf, 0) # <<<<<<<<<<<<<< @@ -14291,7 +14458,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ /*normal exit:*/{ __pyx_t_6 = __pyx_v_self->__pyx_base._buf; __Pyx_INCREF(__pyx_t_6); - __pyx_t_9 = PyByteArray_Resize(__pyx_t_6, 0); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 635, __pyx_L1_error) + __pyx_t_9 = PyByteArray_Resize(__pyx_t_6, 0); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 641, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L6; } @@ -14315,7 +14482,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ { __pyx_t_6 = __pyx_v_self->__pyx_base._buf; __Pyx_INCREF(__pyx_t_6); - __pyx_t_18 = PyByteArray_Resize(__pyx_t_6, 0); if (unlikely(__pyx_t_18 == ((int)-1))) __PYX_ERR(0, 635, __pyx_L14_error) + __pyx_t_18 = PyByteArray_Resize(__pyx_t_6, 0); if (unlikely(__pyx_t_18 == ((int)-1))) __PYX_ERR(0, 641, __pyx_L14_error) 
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } if (PY_MAJOR_VERSION >= 3) { @@ -14347,7 +14514,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_ __pyx_L6:; } - /* "aiohttp/_http_parser.pyx":588 + /* "aiohttp/_http_parser.pyx":594 * auto_decompress) * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< @@ -14587,7 +14754,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_4__setstat return __pyx_r; } -/* "aiohttp/_http_parser.pyx":640 +/* "aiohttp/_http_parser.pyx":646 * cdef class HttpResponseParser(HttpParser): * * def __init__( # <<<<<<<<<<<<<< @@ -14627,7 +14794,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj { PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_limit,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,&__pyx_n_s_auto_decompress,0}; - /* "aiohttp/_http_parser.pyx":641 + /* "aiohttp/_http_parser.pyx":647 * * def __init__( * self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< @@ -14636,7 +14803,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj */ values[3] = __Pyx_Arg_NewRef_VARARGS(((PyObject *)Py_None)); - /* "aiohttp/_http_parser.pyx":643 + /* "aiohttp/_http_parser.pyx":649 * self, protocol, loop, int limit, timer=None, * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< @@ -14679,7 +14846,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[0]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: @@ -14687,9 +14854,9 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[1]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 1); __PYX_ERR(0, 640, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 1); __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: @@ -14697,70 +14864,70 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj (void)__Pyx_Arg_NewRef_VARARGS(values[2]); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 2); __PYX_ERR(0, 640, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 2); __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_timer); if (value) { values[3] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_max_line_size); if (value) { values[4] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if 
(unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 5: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_max_headers); if (value) { values[5] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 6: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_max_field_size); if (value) { values[6] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 7: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_payload_exception); if (value) { values[7] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 8: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_response_with_body); if (value) { values[8] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 9: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_read_until_eof); if (value) { values[9] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 10: if (kw_args > 0) { PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_auto_decompress); if (value) { values[10] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L3_error) + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 646, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 640, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 646, __pyx_L3_error) } } else { switch (__pyx_nargs) { @@ -14789,29 +14956,29 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj } __pyx_v_protocol = values[0]; __pyx_v_loop = values[1]; - __pyx_v_limit = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_limit == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 641, __pyx_L3_error) + __pyx_v_limit = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_limit == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 647, __pyx_L3_error) __pyx_v_timer = values[3]; if (values[4]) { - __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 642, __pyx_L3_error) + __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_line_size == 
(size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 648, __pyx_L3_error) } else { __pyx_v_max_line_size = ((size_t)0x1FFE); } if (values[5]) { - __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 642, __pyx_L3_error) + __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 648, __pyx_L3_error) } else { __pyx_v_max_headers = ((size_t)0x8000); } if (values[6]) { - __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[6]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 643, __pyx_L3_error) + __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[6]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 649, __pyx_L3_error) } else { __pyx_v_max_field_size = ((size_t)0x1FFE); } __pyx_v_payload_exception = values[7]; if (values[8]) { - __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 644, __pyx_L3_error) + __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 650, __pyx_L3_error) } else { - /* "aiohttp/_http_parser.pyx":644 + /* "aiohttp/_http_parser.pyx":650 * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< @@ -14821,15 +14988,15 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj __pyx_v_response_with_body = ((int)1); } if (values[9]) { - __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 644, __pyx_L3_error) + __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 650, __pyx_L3_error) } else { __pyx_v_read_until_eof = ((int)0); } if (values[10]) { - __pyx_v_auto_decompress = __Pyx_PyObject_IsTrue(values[10]); if (unlikely((__pyx_v_auto_decompress == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 645, __pyx_L3_error) + __pyx_v_auto_decompress = __Pyx_PyObject_IsTrue(values[10]); if (unlikely((__pyx_v_auto_decompress == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 651, __pyx_L3_error) } else { - /* "aiohttp/_http_parser.pyx":645 + /* "aiohttp/_http_parser.pyx":651 * size_t max_field_size=8190, payload_exception=None, * bint response_with_body=True, bint read_until_eof=False, * bint auto_decompress=True # <<<<<<<<<<<<<< @@ -14841,7 +15008,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj } goto __pyx_L6_skip; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, __pyx_nargs); __PYX_ERR(0, 640, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, __pyx_nargs); __PYX_ERR(0, 646, __pyx_L3_error) __pyx_L6_skip:; goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; @@ -14857,7 +15024,7 @@ static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObj __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, 
__pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof, __pyx_v_auto_decompress); - /* "aiohttp/_http_parser.pyx":640 + /* "aiohttp/_http_parser.pyx":646 * cdef class HttpResponseParser(HttpParser): * * def __init__( # <<<<<<<<<<<<<< @@ -14888,7 +15055,7 @@ static int __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(struct int __pyx_clineno = 0; __Pyx_RefNannySetupContext("__init__", 1); - /* "aiohttp/_http_parser.pyx":647 + /* "aiohttp/_http_parser.pyx":653 * bint auto_decompress=True * ): * self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, # <<<<<<<<<<<<<< @@ -14904,25 +15071,25 @@ static int __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(struct __pyx_t_2.response_with_body = __pyx_v_response_with_body; __pyx_t_2.read_until_eof = __pyx_v_read_until_eof; __pyx_t_2.auto_decompress = __pyx_v_auto_decompress; - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_RESPONSE, __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 647, __pyx_L1_error) + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_RESPONSE, __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 653, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":652 + /* "aiohttp/_http_parser.pyx":658 * auto_decompress) * # Use strict parsing on dev mode, so users are warned about broken servers. * if not DEBUG: # <<<<<<<<<<<<<< * cparser.llhttp_set_lenient_headers(self._cparser, 1) * cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1) */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DEBUG); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 652, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DEBUG); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 658, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 < 0))) __PYX_ERR(0, 652, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 < 0))) __PYX_ERR(0, 658, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_4 = (!__pyx_t_3); if (__pyx_t_4) { - /* "aiohttp/_http_parser.pyx":653 + /* "aiohttp/_http_parser.pyx":659 * # Use strict parsing on dev mode, so users are warned about broken servers. 
[Hunks in the Cython-generated aiohttp/_http_parser.c, regenerated from aiohttp/_http_parser.pyx. The embedded ".pyx" line-number comments and the matching __PYX_ERR() line references are shifted by +6 before the cb_on_headers_complete hunk and by +3 after it (e.g. 654 -> 660, 760 -> 763), across HttpResponseParser.__init__, HttpResponseParser._on_status_complete and the cb_on_message_begin, cb_on_url, cb_on_status, cb_on_header_field, cb_on_header_value, cb_on_headers_complete, cb_on_body, cb_on_message_complete, cb_on_chunk_header and cb_on_chunk_complete callbacks, and the interned constant for the empty reason string is renamed from __pyx_kp_u__8 to __pyx_kp_u__4. The only functional change in this stretch is in cb_on_headers_complete, where the regenerated code now checks pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT (previously pyparser._cparser.upgrade or pyparser._cparser.method == cparser.HTTP_CONNECT) before returning 2; the collapse of that multi-line condition in the .pyx source is what changes the line shift from +6 to +3.]
__pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(llhttp_t *__pyx_ __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_exc; - /* "aiohttp/_http_parser.pyx":808 + /* "aiohttp/_http_parser.pyx":811 * except BaseException as exc: * pyparser._last_error = exc * return -1 # <<<<<<<<<<<<<< @@ -17785,7 +17927,7 @@ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(llhttp_t *__pyx_ goto __pyx_L13_return; } - /* "aiohttp/_http_parser.pyx":806 + /* "aiohttp/_http_parser.pyx":809 * try: * pyparser._on_chunk_complete() * except BaseException as exc: # <<<<<<<<<<<<<< @@ -17803,7 +17945,7 @@ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(llhttp_t *__pyx_ } goto __pyx_L5_except_error; - /* "aiohttp/_http_parser.pyx":804 + /* "aiohttp/_http_parser.pyx":807 * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< @@ -17824,7 +17966,7 @@ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(llhttp_t *__pyx_ goto __pyx_L0; } - /* "aiohttp/_http_parser.pyx":802 + /* "aiohttp/_http_parser.pyx":805 * * * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< @@ -17846,7 +17988,7 @@ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(llhttp_t *__pyx_ return __pyx_r; } -/* "aiohttp/_http_parser.pyx":813 +/* "aiohttp/_http_parser.pyx":816 * * * cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): # <<<<<<<<<<<<<< @@ -17870,7 +18012,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ int __pyx_clineno = 0; __Pyx_RefNannySetupContext("parser_error_from_errno", 1); - /* "aiohttp/_http_parser.pyx":814 + /* "aiohttp/_http_parser.pyx":817 * * cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): * cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) # <<<<<<<<<<<<<< @@ -17879,28 +18021,28 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ __pyx_v_errno = llhttp_get_errno(__pyx_v_parser); - /* "aiohttp/_http_parser.pyx":815 + /* "aiohttp/_http_parser.pyx":818 * cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): * cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) * cdef bytes desc = cparser.llhttp_get_error_reason(parser) # <<<<<<<<<<<<<< * * err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) */ - __pyx_t_1 = __Pyx_PyBytes_FromString(llhttp_get_error_reason(__pyx_v_parser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 815, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBytes_FromString(llhttp_get_error_reason(__pyx_v_parser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 818, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_desc = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":817 + /* "aiohttp/_http_parser.pyx":820 * cdef bytes desc = cparser.llhttp_get_error_reason(parser) * * err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) # <<<<<<<<<<<<<< * * if errno in {cparser.HPE_CB_MESSAGE_BEGIN, */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_r, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 817, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_r, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 820, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 
817, __pyx_L1_error) + __pyx_t_3 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 820, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; @@ -17921,14 +18063,14 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 3+__pyx_t_5); __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 817, __pyx_L1_error) + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 820, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } __pyx_v_err_msg = __pyx_t_1; __pyx_t_1 = 0; - /* "aiohttp/_http_parser.pyx":819 + /* "aiohttp/_http_parser.pyx":822 * err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) * * if errno in {cparser.HPE_CB_MESSAGE_BEGIN, # <<<<<<<<<<<<<< @@ -17939,7 +18081,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ case HPE_CB_MESSAGE_BEGIN: case HPE_CB_HEADERS_COMPLETE: - /* "aiohttp/_http_parser.pyx":820 + /* "aiohttp/_http_parser.pyx":823 * * if errno in {cparser.HPE_CB_MESSAGE_BEGIN, * cparser.HPE_CB_HEADERS_COMPLETE, # <<<<<<<<<<<<<< @@ -17948,7 +18090,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_CB_MESSAGE_COMPLETE: - /* "aiohttp/_http_parser.pyx":821 + /* "aiohttp/_http_parser.pyx":824 * if errno in {cparser.HPE_CB_MESSAGE_BEGIN, * cparser.HPE_CB_HEADERS_COMPLETE, * cparser.HPE_CB_MESSAGE_COMPLETE, # <<<<<<<<<<<<<< @@ -17957,7 +18099,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_CB_CHUNK_HEADER: - /* "aiohttp/_http_parser.pyx":822 + /* "aiohttp/_http_parser.pyx":825 * cparser.HPE_CB_HEADERS_COMPLETE, * cparser.HPE_CB_MESSAGE_COMPLETE, * cparser.HPE_CB_CHUNK_HEADER, # <<<<<<<<<<<<<< @@ -17966,7 +18108,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_CB_CHUNK_COMPLETE: - /* "aiohttp/_http_parser.pyx":823 + /* "aiohttp/_http_parser.pyx":826 * cparser.HPE_CB_MESSAGE_COMPLETE, * cparser.HPE_CB_CHUNK_HEADER, * cparser.HPE_CB_CHUNK_COMPLETE, # <<<<<<<<<<<<<< @@ -17975,7 +18117,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_CONSTANT: - /* "aiohttp/_http_parser.pyx":824 + /* "aiohttp/_http_parser.pyx":827 * cparser.HPE_CB_CHUNK_HEADER, * cparser.HPE_CB_CHUNK_COMPLETE, * cparser.HPE_INVALID_CONSTANT, # <<<<<<<<<<<<<< @@ -17984,7 +18126,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_HEADER_TOKEN: - /* "aiohttp/_http_parser.pyx":825 + /* "aiohttp/_http_parser.pyx":828 * cparser.HPE_CB_CHUNK_COMPLETE, * cparser.HPE_INVALID_CONSTANT, * cparser.HPE_INVALID_HEADER_TOKEN, # <<<<<<<<<<<<<< @@ -17993,7 +18135,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_CONTENT_LENGTH: - /* "aiohttp/_http_parser.pyx":826 + /* "aiohttp/_http_parser.pyx":829 * cparser.HPE_INVALID_CONSTANT, * cparser.HPE_INVALID_HEADER_TOKEN, * cparser.HPE_INVALID_CONTENT_LENGTH, # <<<<<<<<<<<<<< @@ -18002,7 +18144,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_CHUNK_SIZE: - /* "aiohttp/_http_parser.pyx":827 + /* "aiohttp/_http_parser.pyx":830 * cparser.HPE_INVALID_HEADER_TOKEN, * 
cparser.HPE_INVALID_CONTENT_LENGTH, * cparser.HPE_INVALID_CHUNK_SIZE, # <<<<<<<<<<<<<< @@ -18011,7 +18153,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_EOF_STATE: - /* "aiohttp/_http_parser.pyx":828 + /* "aiohttp/_http_parser.pyx":831 * cparser.HPE_INVALID_CONTENT_LENGTH, * cparser.HPE_INVALID_CHUNK_SIZE, * cparser.HPE_INVALID_EOF_STATE, # <<<<<<<<<<<<<< @@ -18020,7 +18162,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_TRANSFER_ENCODING: - /* "aiohttp/_http_parser.pyx":830 + /* "aiohttp/_http_parser.pyx":833 * cparser.HPE_INVALID_EOF_STATE, * cparser.HPE_INVALID_TRANSFER_ENCODING}: * return BadHttpMessage(err_msg) # <<<<<<<<<<<<<< @@ -18028,7 +18170,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ * cparser.HPE_INVALID_METHOD, */ __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 830, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 833, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_5 = 0; @@ -18048,7 +18190,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_err_msg}; __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 830, __pyx_L1_error) + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 833, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } @@ -18056,7 +18198,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ __pyx_t_1 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":819 + /* "aiohttp/_http_parser.pyx":822 * err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) * * if errno in {cparser.HPE_CB_MESSAGE_BEGIN, # <<<<<<<<<<<<<< @@ -18066,7 +18208,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ break; case HPE_INVALID_STATUS: - /* "aiohttp/_http_parser.pyx":831 + /* "aiohttp/_http_parser.pyx":834 * cparser.HPE_INVALID_TRANSFER_ENCODING}: * return BadHttpMessage(err_msg) * elif errno in {cparser.HPE_INVALID_STATUS, # <<<<<<<<<<<<<< @@ -18075,7 +18217,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_METHOD: - /* "aiohttp/_http_parser.pyx":832 + /* "aiohttp/_http_parser.pyx":835 * return BadHttpMessage(err_msg) * elif errno in {cparser.HPE_INVALID_STATUS, * cparser.HPE_INVALID_METHOD, # <<<<<<<<<<<<<< @@ -18084,7 +18226,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ */ case HPE_INVALID_VERSION: - /* "aiohttp/_http_parser.pyx":834 + /* "aiohttp/_http_parser.pyx":837 * cparser.HPE_INVALID_METHOD, * cparser.HPE_INVALID_VERSION}: * return BadStatusLine(error=err_msg) # <<<<<<<<<<<<<< @@ -18092,12 +18234,12 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ * return InvalidURLError(err_msg) */ __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 834, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 837, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 834, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 837, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_error, __pyx_v_err_msg) < 0) __PYX_ERR(0, 834, __pyx_L1_error) - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_empty_tuple, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 834, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_error, __pyx_v_err_msg) < 0) __PYX_ERR(0, 837, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_empty_tuple, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 837, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; @@ -18105,7 +18247,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":831 + /* "aiohttp/_http_parser.pyx":834 * cparser.HPE_INVALID_TRANSFER_ENCODING}: * return BadHttpMessage(err_msg) * elif errno in {cparser.HPE_INVALID_STATUS, # <<<<<<<<<<<<<< @@ -18115,7 +18257,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ break; case HPE_INVALID_URL: - /* "aiohttp/_http_parser.pyx":836 + /* "aiohttp/_http_parser.pyx":839 * return BadStatusLine(error=err_msg) * elif errno == cparser.HPE_INVALID_URL: * return InvalidURLError(err_msg) # <<<<<<<<<<<<<< @@ -18123,7 +18265,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ * return BadHttpMessage(err_msg) */ __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 836, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 839, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = NULL; __pyx_t_5 = 0; @@ -18143,7 +18285,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ PyObject *__pyx_callargs[2] = {__pyx_t_1, __pyx_v_err_msg}; __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 836, __pyx_L1_error) + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 839, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } @@ -18151,7 +18293,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":835 + /* "aiohttp/_http_parser.pyx":838 * cparser.HPE_INVALID_VERSION}: * return BadStatusLine(error=err_msg) * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<< @@ -18162,13 +18304,13 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ default: break; } - /* "aiohttp/_http_parser.pyx":838 + /* "aiohttp/_http_parser.pyx":841 * return InvalidURLError(err_msg) * * return BadHttpMessage(err_msg) # <<<<<<<<<<<<<< */ __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 838, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 841, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = NULL; __pyx_t_5 = 0; @@ -18188,7 +18330,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ PyObject *__pyx_callargs[2] = {__pyx_t_1, __pyx_v_err_msg}; __pyx_t_3 = 
__Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 838, __pyx_L1_error) + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 841, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } @@ -18196,7 +18338,7 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(llhttp_ __pyx_t_3 = 0; goto __pyx_L0; - /* "aiohttp/_http_parser.pyx":813 + /* "aiohttp/_http_parser.pyx":816 * * * cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): # <<<<<<<<<<<<<< @@ -18376,7 +18518,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessa */ __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__11, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__12, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { @@ -18920,7 +19062,7 @@ static PyObject *__pyx_pf_7aiohttp_12_http_parser_2__pyx_unpickle_RawResponseMes */ __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__13, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__14, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { @@ -19294,8 +19436,10 @@ static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessa return __pyx_r; } +#if CYTHON_USE_FREELISTS static struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[250]; static int __pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage = 0; +#endif static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p; @@ -19304,7 +19448,7 @@ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(PyTypeOb allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); o = alloc_func(t, 0); #else - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (likely((int)(__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage > 0) & (int)(t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)) & (int)(!__Pyx_PyType_HasFeature(t, (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE))))) { o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[--__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage]; memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)); @@ -19355,7 +19499,7 @@ static void __pyx_tp_dealloc_7aiohttp_12_http_parser_RawRequestMessage(PyObject Py_CLEAR(p->upgrade); Py_CLEAR(p->chunked); Py_CLEAR(p->url); - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (((int)(__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage < 250) & (int)(Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)) & 
(int)(!__Pyx_PyType_HasFeature(Py_TYPE(o), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE))))) { __pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage++] = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o); } else @@ -19600,8 +19744,10 @@ static PyTypeObject __pyx_type_7aiohttp_12_http_parser_RawRequestMessage = { }; #endif +#if CYTHON_USE_FREELISTS static struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[250]; static int __pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage = 0; +#endif static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p; @@ -19610,7 +19756,7 @@ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(PyTypeO allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); o = alloc_func(t, 0); #else - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (likely((int)(__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage > 0) & (int)(t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)) & (int)(!__Pyx_PyType_HasFeature(t, (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE))))) { o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[--__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage]; memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)); @@ -19657,7 +19803,7 @@ static void __pyx_tp_dealloc_7aiohttp_12_http_parser_RawResponseMessage(PyObject Py_CLEAR(p->compression); Py_CLEAR(p->upgrade); Py_CLEAR(p->chunked); - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (((int)(__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage < 250) & (int)(Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)) & (int)(!__Pyx_PyType_HasFeature(Py_TYPE(o), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE))))) { __pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage++] = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o); } else @@ -20389,8 +20535,10 @@ static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpResponseParser = { }; #endif +#if CYTHON_USE_FREELISTS static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct__genexpr[8]; static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct__genexpr = 0; +#endif static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct__genexpr(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { PyObject *o; @@ -20398,7 +20546,7 @@ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct__genexp allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); o = alloc_func(t, 0); #else - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (likely((int)(__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct__genexpr > 0) & (int)(t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr)))) { o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct__genexpr[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct__genexpr]; memset(o, 0, sizeof(struct 
__pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr)); @@ -20427,7 +20575,7 @@ static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct__genexpr Py_CLEAR(p->__pyx_genexpr_arg_0); Py_CLEAR(p->__pyx_v_name); Py_CLEAR(p->__pyx_v_val); - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (((int)(__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct__genexpr < 8) & (int)(Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr)))) { __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct__genexpr[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct__genexpr++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct__genexpr *)o); } else @@ -20556,8 +20704,10 @@ static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct__genex }; #endif +#if CYTHON_USE_FREELISTS static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[8]; static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = 0; +#endif static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { PyObject *o; @@ -20565,7 +20715,7 @@ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genex allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); o = alloc_func(t, 0); #else - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (likely((int)(__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr > 0) & (int)(t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)))) { o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr]; memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)); @@ -20594,7 +20744,7 @@ static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_1_genexp Py_CLEAR(p->__pyx_genexpr_arg_0); Py_CLEAR(p->__pyx_v_name); Py_CLEAR(p->__pyx_v_val); - #if CYTHON_COMPILING_IN_CPYTHON + #if CYTHON_USE_FREELISTS if (((int)(__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr < 8) & (int)(Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)))) { __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o); } else @@ -20755,6 +20905,7 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { {&__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD, __pyx_k_ACCESS_CONTROL_REQUEST_METHOD, sizeof(__pyx_k_ACCESS_CONTROL_REQUEST_METHOD), 0, 0, 1, 1}, {&__pyx_n_s_AGE, __pyx_k_AGE, sizeof(__pyx_k_AGE), 0, 0, 1, 1}, {&__pyx_n_s_ALLOW, __pyx_k_ALLOW, sizeof(__pyx_k_ALLOW), 0, 0, 1, 1}, + {&__pyx_n_s_ALLOWED_UPGRADES, __pyx_k_ALLOWED_UPGRADES, sizeof(__pyx_k_ALLOWED_UPGRADES), 0, 0, 1, 1}, {&__pyx_n_s_AUTHORIZATION, __pyx_k_AUTHORIZATION, sizeof(__pyx_k_AUTHORIZATION), 0, 0, 1, 1}, {&__pyx_n_s_BadHttpMessage, __pyx_k_BadHttpMessage, sizeof(__pyx_k_BadHttpMessage), 0, 0, 1, 1}, {&__pyx_n_s_BadStatusLine, __pyx_k_BadStatusLine, sizeof(__pyx_k_BadStatusLine), 0, 0, 1, 1}, @@ -20879,17 +21030,17 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { {&__pyx_n_s_X_FORWARDED_FOR, 
__pyx_k_X_FORWARDED_FOR, sizeof(__pyx_k_X_FORWARDED_FOR), 0, 0, 1, 1}, {&__pyx_n_s_X_FORWARDED_HOST, __pyx_k_X_FORWARDED_HOST, sizeof(__pyx_k_X_FORWARDED_HOST), 0, 0, 1, 1}, {&__pyx_n_s_X_FORWARDED_PROTO, __pyx_k_X_FORWARDED_PROTO, sizeof(__pyx_k_X_FORWARDED_PROTO), 0, 0, 1, 1}, + {&__pyx_n_s__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 0, 1, 1}, {&__pyx_kp_u__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 1, 0, 0}, - {&__pyx_kp_u__12, __pyx_k__12, sizeof(__pyx_k__12), 0, 1, 0, 0}, + {&__pyx_kp_u__11, __pyx_k__11, sizeof(__pyx_k__11), 0, 1, 0, 0}, + {&__pyx_kp_u__13, __pyx_k__13, sizeof(__pyx_k__13), 0, 1, 0, 0}, {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, + {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, {&__pyx_kp_b__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 0, 0}, - {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0}, - {&__pyx_kp_u__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 1, 0, 0}, - {&__pyx_n_s__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 0, 1, 1}, - {&__pyx_kp_b__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 0, 0, 0}, + {&__pyx_kp_u__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 1, 0, 0}, + {&__pyx_kp_b__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 0, 0, 0}, {&__pyx_kp_u__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 1, 0, 0}, - {&__pyx_n_s__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 0, 1, 1}, {&__pyx_kp_u__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 1, 0, 0}, {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, {&__pyx_n_s_after, __pyx_k_after, sizeof(__pyx_k_after), 0, 0, 1, 1}, @@ -20935,6 +21086,7 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { {&__pyx_n_s_fragment, __pyx_k_fragment, sizeof(__pyx_k_fragment), 0, 0, 1, 1}, {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, {&__pyx_n_s_genexpr, __pyx_k_genexpr, sizeof(__pyx_k_genexpr), 0, 0, 1, 1}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, {&__pyx_n_u_gzip, __pyx_k_gzip, sizeof(__pyx_k_gzip), 0, 1, 0, 1}, {&__pyx_n_s_hdrs, __pyx_k_hdrs, sizeof(__pyx_k_hdrs), 0, 0, 1, 1}, @@ -21017,6 +21169,7 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { {&__pyx_n_s_val, __pyx_k_val, sizeof(__pyx_k_val), 0, 0, 1, 1}, {&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1}, {&__pyx_n_u_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 1, 0, 1}, + {&__pyx_n_u_websocket, __pyx_k_websocket, sizeof(__pyx_k_websocket), 0, 1, 0, 1}, {&__pyx_n_s_yarl, __pyx_k_yarl, sizeof(__pyx_k_yarl), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; @@ -21024,10 +21177,10 @@ static int __Pyx_CreateStringTabAndInitStrings(void) { } /* #### Code section: cached_builtins ### */ static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 87, __pyx_L1_error) - __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 316, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 88, __pyx_L1_error) + __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 317, __pyx_L1_error) __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(1, 2, __pyx_L1_error) - __pyx_builtin_BaseException = __Pyx_GetBuiltinName(__pyx_n_s_BaseException); if 
(!__pyx_builtin_BaseException) __PYX_ERR(0, 684, __pyx_L1_error) + __pyx_builtin_BaseException = __Pyx_GetBuiltinName(__pyx_n_s_BaseException); if (!__pyx_builtin_BaseException) __PYX_ERR(0, 690, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; @@ -21038,16 +21191,27 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - /* "aiohttp/_http_parser.pyx":551 + /* "aiohttp/_http_parser.pyx":429 + * + * if self._cparser.type == cparser.HTTP_REQUEST: + * allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES # <<<<<<<<<<<<<< + * if allowed or self._cparser.method == cparser.HTTP_CONNECT: + * self._upgraded = True + */ + __pyx_tuple__5 = PyTuple_Pack(2, __pyx_n_u_upgrade, __pyx_kp_u__4); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "aiohttp/_http_parser.pyx":557 * after = cparser.llhttp_get_error_pos(self._cparser) * before = data[:after - self.py_buf.buf] * after_b = after.split(b"\r\n", 1)[0] # <<<<<<<<<<<<<< * before = before.rsplit(b"\r\n", 1)[-1] * data = before + after_b */ - __pyx_tuple__5 = PyTuple_Pack(2, __pyx_kp_b__4, __pyx_int_1); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 551, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__5); - __Pyx_GIVEREF(__pyx_tuple__5); + __pyx_tuple__7 = PyTuple_Pack(2, __pyx_kp_b__6, __pyx_int_1); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 557, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); /* "(tree fragment)":4 * cdef object __pyx_PickleError @@ -21056,45 +21220,45 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { * from pickle import PickleError as __pyx_PickleError * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xf602a9f, 0x03e51e3, 0x1408252) = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum */ - __pyx_tuple__11 = PyTuple_Pack(3, __pyx_int_257960607, __pyx_int_4084195, __pyx_int_21004882); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__11); - __Pyx_GIVEREF(__pyx_tuple__11); - __pyx_tuple__13 = PyTuple_Pack(3, __pyx_int_213037754, __pyx_int_83803150, __pyx_int_209127132); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_tuple__12 = PyTuple_Pack(3, __pyx_int_257960607, __pyx_int_4084195, __pyx_int_21004882); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_tuple__14 = PyTuple_Pack(3, __pyx_int_213037754, __pyx_int_83803150, __pyx_int_209127132); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__14); + __Pyx_GIVEREF(__pyx_tuple__14); - /* "aiohttp/_http_parser.pyx":57 + /* "aiohttp/_http_parser.pyx":58 * char* PyByteArray_AsString(object) * * __all__ = ('HttpRequestParser', 'HttpResponseParser', # <<<<<<<<<<<<<< * 'RawRequestMessage', 'RawResponseMessage') * */ - __pyx_tuple__14 = PyTuple_Pack(4, __pyx_n_u_HttpRequestParser, __pyx_n_u_HttpResponseParser, __pyx_n_u_RawRequestMessage_2, __pyx_n_u_RawResponseMessage_2); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__14); - __Pyx_GIVEREF(__pyx_tuple__14); + __pyx_tuple__15 = PyTuple_Pack(4, __pyx_n_u_HttpRequestParser, __pyx_n_u_HttpResponseParser, 
__pyx_n_u_RawRequestMessage_2, __pyx_n_u_RawResponseMessage_2); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 58, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); - /* "aiohttp/_http_parser.pyx":150 + /* "aiohttp/_http_parser.pyx":151 * return '' * * def _replace(self, **dct): # <<<<<<<<<<<<<< * cdef RawRequestMessage ret * ret = _new_request_message(self.method, */ - __pyx_tuple__15 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_dct, __pyx_n_s_ret); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 150, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_replace, 150, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 150, __pyx_L1_error) + __pyx_tuple__16 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_dct, __pyx_n_s_ret); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__16); + __Pyx_GIVEREF(__pyx_tuple__16); + __pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_replace, 151, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__17)) __PYX_ERR(0, 151, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * cdef tuple state * cdef object _dict */ - __pyx_tuple__17 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__18 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__18); + __Pyx_GIVEREF(__pyx_tuple__18); + __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(1, 1, __pyx_L1_error) /* "(tree fragment)":16 * else: @@ -21102,17 +21266,17 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) */ - __pyx_tuple__19 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__19); - __Pyx_GIVEREF(__pyx_tuple__19); - __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, 
__pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(1, 16, __pyx_L1_error) + __pyx_tuple__20 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__20); + __Pyx_GIVEREF(__pyx_tuple__20); + __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(1, 16, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * cdef tuple state * cdef object _dict */ - __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_codeobj__22 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__22)) __PYX_ERR(1, 1, __pyx_L1_error) /* "(tree fragment)":16 * else: @@ -21120,53 +21284,53 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) */ - __pyx_codeobj__22 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__22)) __PYX_ERR(1, 16, __pyx_L1_error) + __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(1, 16, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":502 + /* "aiohttp/_http_parser.pyx":508 * ### Public API ### * * def feed_eof(self): # <<<<<<<<<<<<<< * cdef bytes desc * */ - __pyx_tuple__23 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_desc); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 502, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__23); - __Pyx_GIVEREF(__pyx_tuple__23); - __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__23, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_feed_eof, 502, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 502, __pyx_L1_error) + __pyx_tuple__24 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_desc); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(0, 508, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__24); + __Pyx_GIVEREF(__pyx_tuple__24); + __pyx_codeobj__25 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__24, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_feed_eof, 508, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__25)) __PYX_ERR(0, 508, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":522 + /* "aiohttp/_http_parser.pyx":528 * return self._messages[-1][0] * * def feed_data(self, data): # <<<<<<<<<<<<<< * cdef: * size_t data_len */ - __pyx_tuple__25 = PyTuple_Pack(11, __pyx_n_s_self, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_nb, __pyx_n_s_errno, __pyx_n_s_ex, __pyx_n_s_after, __pyx_n_s_before, __pyx_n_s_after_b, __pyx_n_s_pointer, __pyx_n_s_messages); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 522, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__25); - __Pyx_GIVEREF(__pyx_tuple__25); - __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 11, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_feed_data, 522, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 522, __pyx_L1_error) + __pyx_tuple__26 = PyTuple_Pack(11, __pyx_n_s_self, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_nb, __pyx_n_s_errno, __pyx_n_s_ex, __pyx_n_s_after, __pyx_n_s_before, __pyx_n_s_after_b, __pyx_n_s_pointer, __pyx_n_s_messages); if (unlikely(!__pyx_tuple__26)) __PYX_ERR(0, 528, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__26); + __Pyx_GIVEREF(__pyx_tuple__26); + __pyx_codeobj__27 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 11, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__26, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_feed_data, 528, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__27)) __PYX_ERR(0, 528, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":570 - * return messages, False, b'' + /* "aiohttp/_http_parser.pyx":576 + * return messages, False, b"" * * def set_upgraded(self, val): # <<<<<<<<<<<<<< * self._upgraded = val * */ - __pyx_tuple__27 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_val); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(0, 570, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__27); - __Pyx_GIVEREF(__pyx_tuple__27); - __pyx_codeobj__28 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__27, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_set_upgraded, 570, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__28)) __PYX_ERR(0, 570, __pyx_L1_error) + __pyx_tuple__28 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_val); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 576, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__28); + __Pyx_GIVEREF(__pyx_tuple__28); + __pyx_codeobj__29 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__28, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_set_upgraded, 576, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__29)) __PYX_ERR(0, 576, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" * def __setstate_cython__(self, __pyx_state): */ - __pyx_tuple__29 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__29); - __Pyx_GIVEREF(__pyx_tuple__29); - __pyx_codeobj__30 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, 
CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__30)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__30 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__30); + __Pyx_GIVEREF(__pyx_tuple__30); + __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(1, 1, __pyx_L1_error) /* "(tree fragment)":3 * def __reduce_cython__(self): @@ -21174,14 +21338,14 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" */ - __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(1, 3, __pyx_L1_error) + __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__32)) __PYX_ERR(1, 3, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" * def __setstate_cython__(self, __pyx_state): */ - __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__32)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(1, 1, __pyx_L1_error) /* "(tree fragment)":3 * def __reduce_cython__(self): @@ -21189,14 +21353,14 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" */ - __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(1, 3, __pyx_L1_error) + __pyx_codeobj__34 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 3, 
__pyx_empty_bytes); if (unlikely(!__pyx_codeobj__34)) __PYX_ERR(1, 3, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" * def __setstate_cython__(self, __pyx_state): */ - __pyx_codeobj__34 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__34)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(1, 1, __pyx_L1_error) /* "(tree fragment)":3 * def __reduce_cython__(self): @@ -21204,18 +21368,18 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" */ - __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(1, 3, __pyx_L1_error) + __pyx_codeobj__36 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__36)) __PYX_ERR(1, 3, __pyx_L1_error) /* "(tree fragment)":1 * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< * cdef object __pyx_PickleError * cdef object __pyx_result */ - __pyx_tuple__36 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__36)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__36); - __Pyx_GIVEREF(__pyx_tuple__36); - __pyx_codeobj__37 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__36, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_RawRequestMessage, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__37)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__36, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_RawResponseMessag, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__37 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__37); + __Pyx_GIVEREF(__pyx_tuple__37); + __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, 
__pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_RawRequestMessage, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_codeobj__39 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_RawResponseMessag, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__39)) __PYX_ERR(1, 1, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; @@ -21301,15 +21465,15 @@ static int __Pyx_modinit_type_init_code(void) { __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); /*--- Type init code ---*/ #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_RawRequestMessage_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage)) __PYX_ERR(0, 110, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_RawRequestMessage_spec, __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) + __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_RawRequestMessage_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage)) __PYX_ERR(0, 111, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_RawRequestMessage_spec, __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 111, __pyx_L1_error) #else __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage = &__pyx_type_7aiohttp_12_http_parser_RawRequestMessage; #endif #if !CYTHON_COMPILING_IN_LIMITED_API #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 111, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage->tp_print = 0; @@ -21319,20 +21483,20 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage->tp_getattro = __Pyx_PyObject_GenericGetAttr; } #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RawRequestMessage_2, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RawRequestMessage_2, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 111, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 111, __pyx_L1_error) #endif #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_RawResponseMessage_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage)) __PYX_ERR(0, 210, __pyx_L1_error) - if 
(__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_RawResponseMessage_spec, __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) + __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_RawResponseMessage_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage)) __PYX_ERR(0, 211, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_RawResponseMessage_spec, __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 211, __pyx_L1_error) #else __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage = &__pyx_type_7aiohttp_12_http_parser_RawResponseMessage; #endif #if !CYTHON_COMPILING_IN_LIMITED_API #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 211, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage->tp_print = 0; @@ -21342,9 +21506,9 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage->tp_getattro = __Pyx_PyObject_GenericGetAttr; } #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RawResponseMessage_2, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RawResponseMessage_2, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 211, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 211, __pyx_L1_error) #endif __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser = &__pyx_vtable_7aiohttp_12_http_parser_HttpParser; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._init = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, enum llhttp_type, PyObject *, PyObject *, int, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args))__pyx_f_7aiohttp_12_http_parser_10HttpParser__init; @@ -21358,15 +21522,15 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete; __pyx_vtable_7aiohttp_12_http_parser_HttpParser.http_version = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version; #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_7aiohttp_12_http_parser_HttpParser = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_HttpParser_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_HttpParser)) __PYX_ERR(0, 272, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_HttpParser_spec, __pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_ptype_7aiohttp_12_http_parser_HttpParser = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, 
&__pyx_type_7aiohttp_12_http_parser_HttpParser_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_HttpParser)) __PYX_ERR(0, 273, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_HttpParser_spec, __pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 273, __pyx_L1_error) #else __pyx_ptype_7aiohttp_12_http_parser_HttpParser = &__pyx_type_7aiohttp_12_http_parser_HttpParser; #endif #if !CYTHON_COMPILING_IN_LIMITED_API #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 273, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser_HttpParser->tp_print = 0; @@ -21376,23 +21540,23 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_HttpParser->tp_getattro = __Pyx_PyObject_GenericGetAttr; } #endif - if (__Pyx_SetVtable(__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + if (__Pyx_SetVtable(__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 273, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_MergeVtables(__pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + if (__Pyx_MergeVtables(__pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 273, __pyx_L1_error) #endif #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 273, __pyx_L1_error) #endif __pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser = &__pyx_vtable_7aiohttp_12_http_parser_HttpRequestParser; __pyx_vtable_7aiohttp_12_http_parser_HttpRequestParser.__pyx_base = *__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; __pyx_vtable_7aiohttp_12_http_parser_HttpRequestParser.__pyx_base._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_complete; #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 574, __pyx_L1_error) + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 580, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_HttpRequestParser_spec, __pyx_t_1); __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser)) __PYX_ERR(0, 574, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_HttpRequestParser_spec, __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 574, __pyx_L1_error) + if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser)) __PYX_ERR(0, 580, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_HttpRequestParser_spec, __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 580, __pyx_L1_error) #else 
__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser = &__pyx_type_7aiohttp_12_http_parser_HttpRequestParser; #endif @@ -21400,7 +21564,7 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser->tp_base = __pyx_ptype_7aiohttp_12_http_parser_HttpParser; #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 574, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 580, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser->tp_print = 0; @@ -21410,24 +21574,24 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser->tp_getattro = __Pyx_PyObject_GenericGetAttr; } #endif - if (__Pyx_SetVtable(__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser, __pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 574, __pyx_L1_error) + if (__Pyx_SetVtable(__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser, __pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 580, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_MergeVtables(__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 574, __pyx_L1_error) + if (__Pyx_MergeVtables(__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 580, __pyx_L1_error) #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpRequestParser, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 574, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpRequestParser, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 580, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 574, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser) < 0) __PYX_ERR(0, 580, __pyx_L1_error) #endif __pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser = &__pyx_vtable_7aiohttp_12_http_parser_HttpResponseParser; __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParser.__pyx_base = *__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParser.__pyx_base._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_18HttpResponseParser__on_status_complete; #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 638, __pyx_L1_error) + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 644, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser_HttpResponseParser_spec, __pyx_t_1); __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser)) __PYX_ERR(0, 638, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_HttpResponseParser_spec, __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 638, __pyx_L1_error) + if 
(unlikely(!__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser)) __PYX_ERR(0, 644, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser_HttpResponseParser_spec, __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 644, __pyx_L1_error) #else __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser = &__pyx_type_7aiohttp_12_http_parser_HttpResponseParser; #endif @@ -21435,7 +21599,7 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser->tp_base = __pyx_ptype_7aiohttp_12_http_parser_HttpParser; #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 638, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 644, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser->tp_print = 0; @@ -21445,24 +21609,24 @@ static int __Pyx_modinit_type_init_code(void) { __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser->tp_getattro = __Pyx_PyObject_GenericGetAttr; } #endif - if (__Pyx_SetVtable(__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser, __pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 638, __pyx_L1_error) + if (__Pyx_SetVtable(__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser, __pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 644, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_MergeVtables(__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 638, __pyx_L1_error) + if (__Pyx_MergeVtables(__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 644, __pyx_L1_error) #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpResponseParser, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 638, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpResponseParser, (PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 644, __pyx_L1_error) #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 638, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser) < 0) __PYX_ERR(0, 644, __pyx_L1_error) #endif #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct__genexpr_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr)) __PYX_ERR(0, 147, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct__genexpr_spec, __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr) < 0) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct__genexpr_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr)) __PYX_ERR(0, 148, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct__genexpr_spec, __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr) < 0) __PYX_ERR(0, 148, __pyx_L1_error) 
#else __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr = &__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct__genexpr; #endif #if !CYTHON_COMPILING_IN_LIMITED_API #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr) < 0) __PYX_ERR(0, 147, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr) < 0) __PYX_ERR(0, 148, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct__genexpr->tp_print = 0; @@ -21473,15 +21637,15 @@ static int __Pyx_modinit_type_init_code(void) { } #endif #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)) __PYX_ERR(0, 244, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr_spec, __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr) < 0) __PYX_ERR(0, 244, __pyx_L1_error) + __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr_spec, NULL); if (unlikely(!__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)) __PYX_ERR(0, 245, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr_spec, __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr) < 0) __PYX_ERR(0, 245, __pyx_L1_error) #else __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = &__pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr; #endif #if !CYTHON_COMPILING_IN_LIMITED_API #endif #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr) < 0) __PYX_ERR(0, 244, __pyx_L1_error) + if (__Pyx_PyType_Ready(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr) < 0) __PYX_ERR(0, 245, __pyx_L1_error) #endif #if PY_MAJOR_VERSION < 3 __pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr->tp_print = 0; @@ -21509,23 +21673,23 @@ static int __Pyx_modinit_type_import_code(void) { /*--- Type import code ---*/ __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyTypeObject), + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyTypeObject), #elif CYTHON_COMPILING_IN_LIMITED_API - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyTypeObject), + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyTypeObject), #else - sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyHeapTypeObject), + sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyHeapTypeObject), #endif - __Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(3, 9, __pyx_L1_error) + __Pyx_ImportType_CheckSize_Warn_3_0_10); if 
(!__pyx_ptype_7cpython_4type_type) __PYX_ERR(3, 9, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 8, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyBoolObject),__Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(4, 8, __pyx_L1_error) + __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyBoolObject),__Pyx_ImportType_CheckSize_Warn_3_0_10); if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(4, 8, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 15, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyComplexObject),__Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(5, 15, __pyx_L1_error) + __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyComplexObject),__Pyx_ImportType_CheckSize_Warn_3_0_10); if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(5, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_RefNannyFinishContext(); return 0; @@ -21821,7 +21985,7 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__http_parser(PyObject *__pyx_pyini __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) { int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to _http_parser pseudovariable */ + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_http_parser" pseudovariable */ if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) pystate_addmodule_run = 1; } @@ -22191,7 +22355,7 @@ if (!__Pyx_RefNanny) { __Pyx_INCREF(__pyx_n_s_hdrs); __Pyx_GIVEREF(__pyx_n_s_hdrs); if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_hdrs)) __PYX_ERR(6, 4, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s__8, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(6, 4, __pyx_L1_error) + __pyx_t_3 = __Pyx_Import(__pyx_n_s__4, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(6, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_2)) __PYX_ERR(6, 4, __pyx_L1_error) @@ -23445,204 +23609,220 @@ if (!__Pyx_RefNanny) { __Pyx_GIVEREF(__pyx_t_3); __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":57 + /* "aiohttp/_http_parser.pyx":50 + * from aiohttp cimport _find_header + * + * ALLOWED_UPGRADES = frozenset({"websocket"}) # <<<<<<<<<<<<<< + * DEF DEFAULT_FREELIST_SIZE = 250 + * + */ + __pyx_t_3 = PySet_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PySet_Add(__pyx_t_3, __pyx_n_u_websocket) < 0) __PYX_ERR(0, 50, __pyx_L1_error) + __pyx_t_79 = __Pyx_PyFrozenSet_New(__pyx_t_3); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + __Pyx_DECREF(__pyx_t_3); 
__pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ALLOWED_UPGRADES, __pyx_t_79) < 0) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; + + /* "aiohttp/_http_parser.pyx":58 * char* PyByteArray_AsString(object) * * __all__ = ('HttpRequestParser', 'HttpResponseParser', # <<<<<<<<<<<<<< * 'RawRequestMessage', 'RawResponseMessage') * */ - if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__14) < 0) __PYX_ERR(0, 57, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__15) < 0) __PYX_ERR(0, 58, __pyx_L1_error) - /* "aiohttp/_http_parser.pyx":60 + /* "aiohttp/_http_parser.pyx":61 * 'RawRequestMessage', 'RawResponseMessage') * * cdef object URL = _URL # <<<<<<<<<<<<<< * cdef object URL_build = URL.build * cdef object CIMultiDict = _CIMultiDict */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_URL_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_URL_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_URL); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":61 + /* "aiohttp/_http_parser.pyx":62 * * cdef object URL = _URL * cdef object URL_build = URL.build # <<<<<<<<<<<<<< * cdef object CIMultiDict = _CIMultiDict * cdef object CIMultiDictProxy = _CIMultiDictProxy */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 61, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_79 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_URL_build); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL_build, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL_build, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":62 + /* "aiohttp/_http_parser.pyx":63 * cdef object URL = _URL * cdef object URL_build = URL.build * cdef object CIMultiDict = _CIMultiDict # <<<<<<<<<<<<<< * cdef object CIMultiDictProxy = _CIMultiDictProxy * cdef object HttpVersion = _HttpVersion */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_CIMultiDict_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_CIMultiDict_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDict); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDict, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDict, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":63 + /* "aiohttp/_http_parser.pyx":64 * cdef object URL_build = URL.build * cdef object CIMultiDict = _CIMultiDict * cdef object CIMultiDictProxy = _CIMultiDictProxy # <<<<<<<<<<<<<< * cdef object HttpVersion = _HttpVersion * cdef object HttpVersion10 = _HttpVersion10 */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_CIMultiDictProxy_2); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_CIMultiDictProxy_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 64, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":64 + /* "aiohttp/_http_parser.pyx":65 * cdef object CIMultiDict = _CIMultiDict * cdef object CIMultiDictProxy = _CIMultiDictProxy * cdef object HttpVersion = _HttpVersion # <<<<<<<<<<<<<< * cdef object HttpVersion10 = _HttpVersion10 * cdef object HttpVersion11 = _HttpVersion11 */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_HttpVersion_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 64, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_HttpVersion_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":65 + /* "aiohttp/_http_parser.pyx":66 * cdef object CIMultiDictProxy = _CIMultiDictProxy * cdef object HttpVersion = _HttpVersion * cdef object HttpVersion10 = _HttpVersion10 # <<<<<<<<<<<<<< * cdef object HttpVersion11 = _HttpVersion11 * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_HttpVersion10_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 65, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_HttpVersion10_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion10); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion10, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion10, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":66 + /* "aiohttp/_http_parser.pyx":67 * cdef object HttpVersion = _HttpVersion * cdef object HttpVersion10 = _HttpVersion10 * cdef object HttpVersion11 = _HttpVersion11 # <<<<<<<<<<<<<< * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_HttpVersion11_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_HttpVersion11_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion11); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion11, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion11, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":67 + /* "aiohttp/_http_parser.pyx":68 * cdef object HttpVersion10 = _HttpVersion10 * cdef object HttpVersion11 = _HttpVersion11 * 
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 # <<<<<<<<<<<<<< * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_79 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 68, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_79); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_79, __pyx_n_s_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_t_79); - __Pyx_GIVEREF(__pyx_t_79); - __pyx_t_79 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __pyx_t_3 = 0; - /* "aiohttp/_http_parser.pyx":68 + /* "aiohttp/_http_parser.pyx":69 * cdef object HttpVersion11 = _HttpVersion11 * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING # <<<<<<<<<<<<<< * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD * cdef object StreamReader = _StreamReader */ - __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 68, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_79); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_79, __pyx_n_s_CONTENT_ENCODING); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 69, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; + __pyx_t_79 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_CONTENT_ENCODING); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":69 + /* "aiohttp/_http_parser.pyx":70 * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD # <<<<<<<<<<<<<< * cdef object StreamReader = _StreamReader * cdef object DeflateBuffer = _DeflateBuffer */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_EMPTY_PAYLOAD_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 69, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_EMPTY_PAYLOAD_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":70 + /* "aiohttp/_http_parser.pyx":71 * 
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD * cdef object StreamReader = _StreamReader # <<<<<<<<<<<<<< * cdef object DeflateBuffer = _DeflateBuffer * */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_StreamReader_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_StreamReader_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 71, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_StreamReader); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":71 + /* "aiohttp/_http_parser.pyx":72 * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD * cdef object StreamReader = _StreamReader * cdef object DeflateBuffer = _DeflateBuffer # <<<<<<<<<<<<<< * * */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_DeflateBuffer_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_DeflateBuffer_2); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":85 + /* "aiohttp/_http_parser.pyx":86 * DEF METHODS_COUNT = 46; * * cdef list _http_method = [] # <<<<<<<<<<<<<< * * for i in range(METHODS_COUNT): */ - __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 85, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_79 = PyList_New(0); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser__http_method); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser__http_method, ((PyObject*)__pyx_t_3)); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser__http_method, ((PyObject*)__pyx_t_79)); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":87 + /* "aiohttp/_http_parser.pyx":88 * cdef list _http_method = [] * * for i in range(METHODS_COUNT): # <<<<<<<<<<<<<< @@ -23650,12 +23830,12 @@ if (!__Pyx_RefNanny) { * cparser.llhttp_method_name( i).decode('ascii')) */ for (__pyx_t_80 = 0; __pyx_t_80 < 46; __pyx_t_80+=1) { - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_t_80); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 87, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_i, __pyx_t_3) < 0) __PYX_ERR(0, 87, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_PyInt_From_long(__pyx_t_80); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 88, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_i, __pyx_t_79) < 0) __PYX_ERR(0, 88, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; - /* "aiohttp/_http_parser.pyx":88 + /* "aiohttp/_http_parser.pyx":89 * * for i in range(METHODS_COUNT): * _http_method.append( # <<<<<<<<<<<<<< @@ -23664,47 +23844,47 @@ if (!__Pyx_RefNanny) { */ if (unlikely(__pyx_v_7aiohttp_12_http_parser__http_method == Py_None)) { 
PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 88, __pyx_L1_error) + __PYX_ERR(0, 89, __pyx_L1_error) } - /* "aiohttp/_http_parser.pyx":89 + /* "aiohttp/_http_parser.pyx":90 * for i in range(METHODS_COUNT): * _http_method.append( * cparser.llhttp_method_name( i).decode('ascii')) # <<<<<<<<<<<<<< * * */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_i); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_81 = ((enum llhttp_method)__Pyx_PyInt_As_enum__llhttp_method(__pyx_t_3)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_i); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + __pyx_t_81 = ((enum llhttp_method)__Pyx_PyInt_As_enum__llhttp_method(__pyx_t_79)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; __pyx_t_82 = llhttp_method_name(((llhttp_method_t)__pyx_t_81)); - __pyx_t_83 = __Pyx_ssize_strlen(__pyx_t_82); if (unlikely(__pyx_t_83 == ((Py_ssize_t)-1))) __PYX_ERR(0, 89, __pyx_L1_error) - __pyx_t_3 = __Pyx_decode_c_string(__pyx_t_82, 0, __pyx_t_83, NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_83 = __Pyx_ssize_strlen(__pyx_t_82); if (unlikely(__pyx_t_83 == ((Py_ssize_t)-1))) __PYX_ERR(0, 90, __pyx_L1_error) + __pyx_t_79 = __Pyx_decode_c_string(__pyx_t_82, 0, __pyx_t_83, NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); - /* "aiohttp/_http_parser.pyx":88 + /* "aiohttp/_http_parser.pyx":89 * * for i in range(METHODS_COUNT): * _http_method.append( # <<<<<<<<<<<<<< * cparser.llhttp_method_name( i).decode('ascii')) * */ - __pyx_t_84 = __Pyx_PyList_Append(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_t_3); if (unlikely(__pyx_t_84 == ((int)-1))) __PYX_ERR(0, 88, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_84 = __Pyx_PyList_Append(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_t_79); if (unlikely(__pyx_t_84 == ((int)-1))) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; } - /* "aiohttp/_http_parser.pyx":150 + /* "aiohttp/_http_parser.pyx":151 * return '' * * def _replace(self, **dct): # <<<<<<<<<<<<<< * cdef RawRequestMessage ret * ret = _new_request_message(self.method, */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17RawRequestMessage_5_replace, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawRequestMessage__replace, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__16)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 150, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage, __pyx_n_s_replace, __pyx_t_3) < 0) __PYX_ERR(0, 150, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17RawRequestMessage_5_replace, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawRequestMessage__replace, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__17)); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage, __pyx_n_s_replace, __pyx_t_79) < 0) 
__PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage); /* "(tree fragment)":1 @@ -23712,10 +23892,10 @@ if (!__Pyx_RefNanny) { * cdef tuple state * cdef object _dict */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawRequestMessage___reduce_cytho, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawRequestMessage___reduce_cytho, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__19)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage, __pyx_n_s_reduce_cython, __pyx_t_79) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage); /* "(tree fragment)":16 @@ -23724,10 +23904,10 @@ if (!__Pyx_RefNanny) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawRequestMessage___setstate_cyt, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__20)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawRequestMessage___setstate_cyt, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage, __pyx_n_s_setstate_cython, __pyx_t_79) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage); /* "(tree fragment)":1 @@ -23735,10 +23915,10 @@ if (!__Pyx_RefNanny) { * cdef tuple state * cdef object _dict */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawResponseMessage___reduce_cyth, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 
= __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawResponseMessage___reduce_cyth, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__22)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage, __pyx_n_s_reduce_cython, __pyx_t_79) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage); /* "(tree fragment)":16 @@ -23747,49 +23927,49 @@ if (!__Pyx_RefNanny) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawResponseMessage___setstate_cy, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__22)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_RawResponseMessage___setstate_cy, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__23)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage, __pyx_n_s_setstate_cython, __pyx_t_79) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage); - /* "aiohttp/_http_parser.pyx":502 + /* "aiohttp/_http_parser.pyx":508 * ### Public API ### * * def feed_eof(self): # <<<<<<<<<<<<<< * cdef bytes desc * */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_5feed_eof, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser_feed_eof, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__24)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 502, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_n_s_feed_eof, __pyx_t_3) < 0) __PYX_ERR(0, 502, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_5feed_eof, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser_feed_eof, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__25)); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 508, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_n_s_feed_eof, __pyx_t_79) < 0) __PYX_ERR(0, 508, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_HttpParser); - /* "aiohttp/_http_parser.pyx":522 + /* "aiohttp/_http_parser.pyx":528 * return self._messages[-1][0] * * def feed_data(self, data): # <<<<<<<<<<<<<< * cdef: * size_t data_len */ - __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_7feed_data, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser_feed_data, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__26)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 522, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_n_s_feed_data, __pyx_t_3) < 0) __PYX_ERR(0, 522, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_7feed_data, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser_feed_data, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__27)); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 528, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_n_s_feed_data, __pyx_t_79) < 0) __PYX_ERR(0, 528, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_HttpParser); - /* "aiohttp/_http_parser.pyx":570 - * return messages, False, b'' + /* "aiohttp/_http_parser.pyx":576 + * return messages, False, b"" * * def set_upgraded(self, val): # <<<<<<<<<<<<<< * self._upgraded = val * */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_9set_upgraded, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser_set_upgraded, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__28)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 570, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_n_s_set_upgraded, __pyx_t_3) < 0) __PYX_ERR(0, 570, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_9set_upgraded, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser_set_upgraded, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__29)); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 576, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_HttpParser, __pyx_n_s_set_upgraded, __pyx_t_79) < 0) __PYX_ERR(0, 576, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; PyType_Modified(__pyx_ptype_7aiohttp_12_http_parser_HttpParser); /* "(tree fragment)":1 @@ -23797,10 +23977,10 @@ if (!__Pyx_RefNanny) { * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" * def __setstate_cython__(self, __pyx_state): */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser___reduce_cython, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__30)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser___reduce_cython, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__31)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_79) < 0) __PYX_ERR(1, 1, 
__pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":3 * def __reduce_cython__(self): @@ -23808,20 +23988,20 @@ if (!__Pyx_RefNanny) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser___setstate_cython, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__31)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 3, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpParser___setstate_cython, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__32)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_79) < 0) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" * def __setstate_cython__(self, __pyx_state): */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpRequestParser___reduce_cytho, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__32)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpRequestParser___reduce_cytho, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__33)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_79) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":3 * def __reduce_cython__(self): @@ -23829,20 +24009,20 @@ if (!__Pyx_RefNanny) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpRequestParser___setstate_cyt, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__33)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 3, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpRequestParser___setstate_cyt, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__34)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if 
(PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_79) < 0) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" * def __setstate_cython__(self, __pyx_state): */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpResponseParser___reduce_cyth, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__34)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpResponseParser___reduce_cyth, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__35)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_79) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":3 * def __reduce_cython__(self): @@ -23850,20 +24030,20 @@ if (!__Pyx_RefNanny) { * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpResponseParser___setstate_cy, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__35)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 3, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_HttpResponseParser___setstate_cy, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__36)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_79) < 0) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":1 * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< * cdef object __pyx_PickleError * cdef object __pyx_result */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_1__pyx_unpickle_RawRequestMessage, 0, __pyx_n_s_pyx_unpickle_RawRequestMessage, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__37)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawRequestMessage, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_1__pyx_unpickle_RawRequestMessage, 0, __pyx_n_s_pyx_unpickle_RawRequestMessage, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__38)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + 
if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawRequestMessage, __pyx_t_79) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "(tree fragment)":11 * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) @@ -23872,20 +24052,20 @@ if (!__Pyx_RefNanny) { * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_3__pyx_unpickle_RawResponseMessage, 0, __pyx_n_s_pyx_unpickle_RawResponseMessag, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__38)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawResponseMessag, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_CyFunction_New(&__pyx_mdef_7aiohttp_12_http_parser_3__pyx_unpickle_RawResponseMessage, 0, __pyx_n_s_pyx_unpickle_RawResponseMessag, NULL, __pyx_n_s_aiohttp__http_parser, __pyx_d, ((PyObject *)__pyx_codeobj__39)); if (unlikely(!__pyx_t_79)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawResponseMessag, __pyx_t_79) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /* "aiohttp/_http_parser.pyx":1 * #cython: language_level=3 # <<<<<<<<<<<<<< * # * # Based on https://github.com/MagicStack/httptools */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_3) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_79 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_79) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; /*--- Wrapped vars code ---*/ @@ -24279,7 +24459,7 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, } } #else - if (is_list || PySequence_Check(o)) { + if (is_list || !PyMapping_Check(o)) { return PySequence_GetItem(o, i); } #endif @@ -24512,14 +24692,14 @@ static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyO { int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; // error + if (unlikely(eq < 0)) return NULL; return kwvalues[i]; } } - return NULL; // not found (no exception set) + return NULL; } #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); PyObject *dict; dict = PyDict_New(); @@ -24629,7 +24809,7 @@ static int __Pyx_ParseOptionalKeywords( if (*name) { values[name-argnames] = value; #if 
CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); // transfer ownership of value to values + Py_INCREF(value); Py_DECREF(key); #endif key = NULL; @@ -24648,7 +24828,7 @@ static int __Pyx_ParseOptionalKeywords( && _PyString_Eq(**name, key)) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -24680,7 +24860,7 @@ static int __Pyx_ParseOptionalKeywords( if (cmp == 0) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -24765,9 +24945,10 @@ static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { /* IterFinish */ static CYTHON_INLINE int __Pyx_IterFinish(void) { + PyObject* exc_type; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign - PyObject* exc_type = __Pyx_PyErr_CurrentExceptionType(); + exc_type = __Pyx_PyErr_CurrentExceptionType(); if (unlikely(exc_type)) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) return -1; @@ -25621,9 +25802,10 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject /* UnpackUnboundCMethod */ static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { + PyObject *result; PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); if (unlikely(!selfless_args)) return NULL; - PyObject *result = PyObject_Call(method, selfless_args, kwargs); + result = PyObject_Call(method, selfless_args, kwargs); Py_DECREF(selfless_args); return result; } @@ -26077,7 +26259,7 @@ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { if (unlikely(!module_name_str)) { goto modbad; } module_name = PyUnicode_FromString(module_name_str); if (unlikely(!module_name)) { goto modbad; } - module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__12); + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__13); if (unlikely(!module_dot)) { goto modbad; } full_name = PyUnicode_Concat(module_dot, name); if (unlikely(!full_name)) { goto modbad; } @@ -26375,38 +26557,38 @@ static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffs #endif return -1; } -#if !CYTHON_USE_TYPE_SLOTS - if (dictoffset == 0) { - PyErr_Format(PyExc_TypeError, - "extension type '%s.200s': " - "unable to validate whether bases have a __dict__ " - "when CYTHON_USE_TYPE_SLOTS is off " - "(likely because you are building in the limited API). 
" - "Therefore, all extension types with multiple bases " - "must add 'cdef dict __dict__' in this compilation mode", - type_name); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } -#else - if (dictoffset == 0 && b->tp_dictoffset) + if (dictoffset == 0) { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "extension type '%.200s' has no __dict__ slot, " - "but base type '" __Pyx_FMT_TYPENAME "' has: " - "either add 'cdef dict __dict__' to the extension type " - "or add '__slots__ = [...]' to the base type", - type_name, b_name); - __Pyx_DECREF_TypeName(b_name); + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif #if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); + Py_DECREF(b0); #endif - return -1; + return -1; + } } -#endif #if CYTHON_AVOID_BORROWED_REFS Py_DECREF(b0); #endif @@ -26752,10 +26934,10 @@ static int __Pyx_MergeVtables(PyTypeObject *type) { #endif /* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType_3_0_5 -#define __PYX_HAVE_RT_ImportType_3_0_5 -static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module_name, const char *class_name, - size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_5 check_size) +#ifndef __PYX_HAVE_RT_ImportType_3_0_10 +#define __PYX_HAVE_RT_ImportType_3_0_10 +static PyTypeObject *__Pyx_ImportType_3_0_10(PyObject *module, const char *module_name, const char *class_name, + size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_10 check_size) { PyObject *result = 0; char warning[200]; @@ -26809,7 +26991,7 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module module_name, class_name, size, basicsize+itemsize); goto bad; } - if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_5 && + if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_10 && ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " @@ -26817,7 +26999,7 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module module_name, class_name, size, basicsize, basicsize+itemsize); goto bad; } - else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_5 && (size_t)basicsize > size) { + else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_10 && (size_t)basicsize > size) { PyOS_snprintf(warning, sizeof(warning), "%s.%s size changed, may indicate binary incompatibility. 
" "Expected %zd from C header, got %zd from PyObject", @@ -26831,6 +27013,38 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module } #endif +/* pyfrozenset_new */ +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it) { + if (it) { + PyObject* result; +#if CYTHON_COMPILING_IN_PYPY + PyObject* args; + args = PyTuple_Pack(1, it); + if (unlikely(!args)) + return NULL; + result = PyObject_Call((PyObject*)&PyFrozenSet_Type, args, NULL); + Py_DECREF(args); + return result; +#else + if (PyFrozenSet_CheckExact(it)) { + Py_INCREF(it); + return it; + } + result = PyFrozenSet_New(it); + if (unlikely(!result)) + return NULL; + if ((PY_VERSION_HEX >= 0x031000A1) || likely(PySet_GET_SIZE(result))) + return result; + Py_DECREF(result); +#endif + } +#if CYTHON_USE_TYPE_SLOTS + return PyFrozenSet_Type.tp_new(&PyFrozenSet_Type, __pyx_empty_tuple, NULL); +#else + return PyObject_Call((PyObject*)&PyFrozenSet_Type, __pyx_empty_tuple, NULL); +#endif +} + /* decode_c_string */ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( const char* cstring, Py_ssize_t start, Py_ssize_t stop, @@ -27850,7 +28064,7 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, default: return NULL; } - return ((_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); } static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { @@ -28309,7 +28523,7 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( #else py_code = PyCode_NewEmpty(filename, funcname, py_line); #endif - Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + Py_XDECREF(py_funcname); return py_code; bad: Py_XDECREF(py_funcname); @@ -29580,7 +29794,7 @@ __Pyx_PyType_GetName(PyTypeObject* tp) if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { PyErr_Clear(); Py_XDECREF(name); - name = __Pyx_NewRef(__pyx_n_s__9); + name = __Pyx_NewRef(__pyx_n_s__10); } return name; } diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 7ea9b32..dd317ed 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -47,6 +47,7 @@ include "_headers.pxi" from aiohttp cimport _find_header +ALLOWED_UPGRADES = frozenset({"websocket"}) DEF DEFAULT_FREELIST_SIZE = 250 cdef extern from "Python.h": @@ -417,7 +418,6 @@ cdef class HttpParser: cdef _on_headers_complete(self): self._process_header() - method = http_method_str(self._cparser.method) should_close = not cparser.llhttp_should_keep_alive(self._cparser) upgrade = self._cparser.upgrade chunked = self._cparser.flags & cparser.F_CHUNKED @@ -425,8 +425,13 @@ cdef class HttpParser: raw_headers = tuple(self._raw_headers) headers = CIMultiDictProxy(self._headers) - if upgrade or self._cparser.method == cparser.HTTP_CONNECT: - self._upgraded = True + if self._cparser.type == cparser.HTTP_REQUEST: + allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + if allowed or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + else: + if upgrade and self._cparser.status_code == 101: + self._upgraded = True # do not support old websocket spec if SEC_WEBSOCKET_KEY1 in headers: @@ -441,6 +446,7 @@ cdef class HttpParser: encoding = enc if self._cparser.type == cparser.HTTP_REQUEST: + method = http_method_str(self._cparser.method) msg = _new_request_message( method, 
self._path, self.http_version(), headers, raw_headers, @@ -565,7 +571,7 @@ cdef class HttpParser: if self._upgraded: return messages, True, data[nb:] else: - return messages, False, b'' + return messages, False, b"" def set_upgraded(self, val): self._upgraded = val @@ -748,10 +754,7 @@ cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: pyparser._last_error = exc return -1 else: - if ( - pyparser._cparser.upgrade or - pyparser._cparser.method == cparser.HTTP_CONNECT - ): + if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT: return 2 else: return 0 diff --git a/aiohttp/_http_writer.c b/aiohttp/_http_writer.c index 74bc210..1c5bc45 100644 --- a/aiohttp/_http_writer.c +++ b/aiohttp/_http_writer.c @@ -1,4 +1,4 @@ -/* Generated by Cython 3.0.5 */ +/* Generated by Cython 3.0.10 */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN @@ -24,10 +24,10 @@ #else #define __PYX_EXTRA_ABI_MODULE_NAME "" #endif -#define CYTHON_ABI "3_0_5" __PYX_EXTRA_ABI_MODULE_NAME +#define CYTHON_ABI "3_0_10" __PYX_EXTRA_ABI_MODULE_NAME #define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI #define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." -#define CYTHON_HEX_VERSION 0x030005F0 +#define CYTHON_HEX_VERSION 0x03000AF0 #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof @@ -119,6 +119,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(PYPY_VERSION) #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_CPYTHON 0 @@ -180,6 +182,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(CYTHON_LIMITED_API) #ifdef Py_LIMITED_API #undef __PYX_LIMITED_VERSION_HEX @@ -241,7 +245,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif -#elif defined(PY_NOGIL) + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 @@ -250,11 +256,17 @@ #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #ifndef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 1 #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS @@ -262,8 +274,6 @@ #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif @@ -275,11 +285,22 @@ #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif @@ -287,6 
+308,12 @@ #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 1 @@ -377,6 +404,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) @@ -569,18 +599,19 @@ PyObject *exception_table = NULL; PyObject *types_module=NULL, *code_type=NULL, *result=NULL; #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; // borrowed - #endif + PyObject *version_info; PyObject *py_minor_version = NULL; + #endif long minor_version = 0; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; // we don't yet need to distinguish between versions > 11 + minor_version = 11; #else if (!(version_info = PySys_GetObject("version_info"))) goto end; if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); if (minor_version == -1 && PyErr_Occurred()) goto end; #endif if (!(types_module = PyImport_ImportModule("types"))) goto end; @@ -601,7 +632,6 @@ Py_XDECREF(code_type); Py_XDECREF(exception_table); Py_XDECREF(types_module); - Py_XDECREF(py_minor_version); if (type) { PyErr_Restore(type, value, traceback); } @@ -634,7 +664,7 @@ PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); // we don't have access to __pyx_empty_bytes here + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); if (!empty_bytes) return NULL; result = #if PY_VERSION_HEX >= 0x030C0000 @@ -720,8 +750,13 @@ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames); #else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif #endif #if CYTHON_METH_FASTCALL #define __Pyx_METH_FASTCALL METH_FASTCALL @@ -929,7 +964,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #endif #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 #define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE(obj);\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ PyObject_GC_Del(obj);\ Py_DECREF(type);\ @@ -1073,7 +1108,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) #endif -#if PY_VERSION_HEX >= 0x030d00A1 +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) #else static CYTHON_INLINE PyObject 
*__Pyx_PyImport_AddModuleRef(const char *name) { @@ -1160,7 +1195,7 @@ static CYTHON_INLINE float __PYX_NAN() { #endif #define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } @@ -1266,24 +1301,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const wchar_t *u) -{ - const wchar_t *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#else -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) -{ - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#endif #define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) @@ -1333,7 +1351,7 @@ static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #endif typedef Py_ssize_t __Pyx_compact_pylong; typedef size_t __Pyx_compact_upylong; - #else // Py < 3.12 + #else #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) @@ -1727,8 +1745,8 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) #else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg // no-op - #define __Pyx_Arg_XDECREF_VARARGS(arg) // no-op - arg is borrowed + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define __Pyx_Arg_XDECREF_VARARGS(arg) #endif #define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) #define __Pyx_KwValues_VARARGS(args, nargs) NULL @@ -1740,12 +1758,13 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); #else #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg // no-op, __Pyx_Arg_FASTCALL is direct and this needs - #define __Pyx_Arg_XDECREF_FASTCALL(arg) // no-op - arg was returned from array + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define 
__Pyx_Arg_XDECREF_FASTCALL(arg) #else #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS @@ -1863,22 +1882,22 @@ static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject #endif /* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_5 -#define __PYX_HAVE_RT_ImportType_proto_3_0_5 +#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_10 +#define __PYX_HAVE_RT_ImportType_proto_3_0_10 #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L #include #endif #if (defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || __cplusplus >= 201103L -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_5(s) alignof(s) +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_10(s) alignof(s) #else -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_5(s) sizeof(void*) +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_10(s) sizeof(void*) #endif -enum __Pyx_ImportType_CheckSize_3_0_5 { - __Pyx_ImportType_CheckSize_Error_3_0_5 = 0, - __Pyx_ImportType_CheckSize_Warn_3_0_5 = 1, - __Pyx_ImportType_CheckSize_Ignore_3_0_5 = 2 +enum __Pyx_ImportType_CheckSize_3_0_10 { + __Pyx_ImportType_CheckSize_Error_3_0_10 = 0, + __Pyx_ImportType_CheckSize_Warn_3_0_10 = 1, + __Pyx_ImportType_CheckSize_Ignore_3_0_10 = 2 }; -static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_5 check_size); +static PyTypeObject *__Pyx_ImportType_3_0_10(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_10 check_size); #endif /* Import.proto */ @@ -2032,7 +2051,7 @@ typedef struct { #endif void *defaults; int defaults_pyobjects; - size_t defaults_size; // used by FusedFunction for copying defaults + size_t defaults_size; int flags; PyObject *defaults_tuple; PyObject *defaults_kwdict; @@ -4481,15 +4500,15 @@ static int __Pyx_modinit_type_import_code(void) { /*--- Type import code ---*/ __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyTypeObject), + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyTypeObject), #elif CYTHON_COMPILING_IN_LIMITED_API - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyTypeObject), + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyTypeObject), #else - sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyHeapTypeObject), + sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyHeapTypeObject), #endif - __Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_ImportType_CheckSize_Warn_3_0_10); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(1, 9, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_RefNannyFinishContext(); return 0; @@ -4704,7 +4723,7 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__http_writer(PyObject *__pyx_pyini __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) { int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from 
__pyx_t_1 to _http_writer pseudovariable */ + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_http_writer" pseudovariable */ if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) pystate_addmodule_run = 1; } @@ -5727,14 +5746,14 @@ static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyO { int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; // error + if (unlikely(eq < 0)) return NULL; return kwvalues[i]; } } - return NULL; // not found (no exception set) + return NULL; } #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); PyObject *dict; dict = PyDict_New(); @@ -5844,7 +5863,7 @@ static int __Pyx_ParseOptionalKeywords( if (*name) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); // transfer ownership of value to values + Py_INCREF(value); Py_DECREF(key); #endif key = NULL; @@ -5863,7 +5882,7 @@ static int __Pyx_ParseOptionalKeywords( && _PyString_Eq(**name, key)) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -5895,7 +5914,7 @@ static int __Pyx_ParseOptionalKeywords( if (cmp == 0) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -5978,9 +5997,10 @@ static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *nam /* IterFinish */ static CYTHON_INLINE int __Pyx_IterFinish(void) { + PyObject* exc_type; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign - PyObject* exc_type = __Pyx_PyErr_CurrentExceptionType(); + exc_type = __Pyx_PyErr_CurrentExceptionType(); if (unlikely(exc_type)) { if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) return -1; @@ -6598,10 +6618,10 @@ static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject #endif /* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType_3_0_5 -#define __PYX_HAVE_RT_ImportType_3_0_5 -static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module_name, const char *class_name, - size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_5 check_size) +#ifndef __PYX_HAVE_RT_ImportType_3_0_10 +#define __PYX_HAVE_RT_ImportType_3_0_10 +static PyTypeObject *__Pyx_ImportType_3_0_10(PyObject *module, const char *module_name, const char *class_name, + size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_10 check_size) { PyObject *result = 0; char warning[200]; @@ -6655,7 +6675,7 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module module_name, class_name, size, basicsize+itemsize); goto bad; } - if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_5 && + if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_10 && ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. 
" @@ -6663,7 +6683,7 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module module_name, class_name, size, basicsize, basicsize+itemsize); goto bad; } - else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_5 && (size_t)basicsize > size) { + else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_10 && (size_t)basicsize > size) { PyOS_snprintf(warning, sizeof(warning), "%s.%s size changed, may indicate binary incompatibility. " "Expected %zd from C header, got %zd from PyObject", @@ -7906,7 +7926,7 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, default: return NULL; } - return ((_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); } static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { @@ -8365,7 +8385,7 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( #else py_code = PyCode_NewEmpty(filename, funcname, py_line); #endif - Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + Py_XDECREF(py_funcname); return py_code; bad: Py_XDECREF(py_funcname); diff --git a/aiohttp/_websocket.c b/aiohttp/_websocket.c index 0ef68a7..88690bb 100644 --- a/aiohttp/_websocket.c +++ b/aiohttp/_websocket.c @@ -1,4 +1,4 @@ -/* Generated by Cython 3.0.5 */ +/* Generated by Cython 3.0.10 */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN @@ -41,10 +41,10 @@ #else #define __PYX_EXTRA_ABI_MODULE_NAME "" #endif -#define CYTHON_ABI "3_0_5" __PYX_EXTRA_ABI_MODULE_NAME +#define CYTHON_ABI "3_0_10" __PYX_EXTRA_ABI_MODULE_NAME #define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI #define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." 
-#define CYTHON_HEX_VERSION 0x030005F0 +#define CYTHON_HEX_VERSION 0x03000AF0 #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof @@ -136,6 +136,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(PYPY_VERSION) #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_CPYTHON 0 @@ -197,6 +199,8 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 #elif defined(CYTHON_LIMITED_API) #ifdef Py_LIMITED_API #undef __PYX_LIMITED_VERSION_HEX @@ -258,7 +262,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif -#elif defined(PY_NOGIL) + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 @@ -267,11 +273,17 @@ #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #ifndef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 1 #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS @@ -279,8 +291,6 @@ #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif @@ -292,11 +302,22 @@ #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif @@ -304,6 +325,12 @@ #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 1 @@ -394,6 +421,9 @@ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) @@ -586,18 +616,19 @@ PyObject *exception_table = NULL; PyObject *types_module=NULL, *code_type=NULL, *result=NULL; #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; // borrowed - #endif + PyObject *version_info; PyObject *py_minor_version = NULL; + #endif long minor_version = 0; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; // we don't yet need to distinguish between versions > 11 + minor_version = 
11; #else if (!(version_info = PySys_GetObject("version_info"))) goto end; if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); if (minor_version == -1 && PyErr_Occurred()) goto end; #endif if (!(types_module = PyImport_ImportModule("types"))) goto end; @@ -618,7 +649,6 @@ Py_XDECREF(code_type); Py_XDECREF(exception_table); Py_XDECREF(types_module); - Py_XDECREF(py_minor_version); if (type) { PyErr_Restore(type, value, traceback); } @@ -651,7 +681,7 @@ PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); // we don't have access to __pyx_empty_bytes here + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); if (!empty_bytes) return NULL; result = #if PY_VERSION_HEX >= 0x030C0000 @@ -737,8 +767,13 @@ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames); #else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif #endif #if CYTHON_METH_FASTCALL #define __Pyx_METH_FASTCALL METH_FASTCALL @@ -946,7 +981,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #endif #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 #define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE(obj);\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ PyObject_GC_Del(obj);\ Py_DECREF(type);\ @@ -1090,7 +1125,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) #endif -#if PY_VERSION_HEX >= 0x030d00A1 +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) #else static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { @@ -1177,7 +1212,7 @@ static CYTHON_INLINE float __PYX_NAN() { #endif #define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } @@ -1203,6 +1238,7 @@ static CYTHON_INLINE float __PYX_NAN() { /* Early includes */ #include #include +#include #include "pythread.h" #include #ifdef _OPENMP @@ -1284,24 +1320,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const wchar_t *u) -{ - 
const wchar_t *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#else -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) -{ - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#endif #define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) @@ -1351,7 +1370,7 @@ static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #endif typedef Py_ssize_t __Pyx_compact_pylong; typedef size_t __Pyx_compact_upylong; - #else // Py < 3.12 + #else #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) @@ -1686,8 +1705,8 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) #else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg // no-op - #define __Pyx_Arg_XDECREF_VARARGS(arg) // no-op - arg is borrowed + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define __Pyx_Arg_XDECREF_VARARGS(arg) #endif #define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) #define __Pyx_KwValues_VARARGS(args, nargs) NULL @@ -1699,12 +1718,13 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); #else #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg // no-op, __Pyx_Arg_FASTCALL is direct and this needs - #define __Pyx_Arg_XDECREF_FASTCALL(arg) // no-op - arg was returned from array + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) #else #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS @@ -1827,22 +1847,22 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); /* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_5 -#define __PYX_HAVE_RT_ImportType_proto_3_0_5 +#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_10 +#define __PYX_HAVE_RT_ImportType_proto_3_0_10 #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L #include #endif #if (defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || __cplusplus >= 201103L -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_5(s) alignof(s) +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_10(s) alignof(s) #else -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_5(s) sizeof(void*) +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_10(s) sizeof(void*) #endif -enum __Pyx_ImportType_CheckSize_3_0_5 { - __Pyx_ImportType_CheckSize_Error_3_0_5 
= 0, - __Pyx_ImportType_CheckSize_Warn_3_0_5 = 1, - __Pyx_ImportType_CheckSize_Ignore_3_0_5 = 2 +enum __Pyx_ImportType_CheckSize_3_0_10 { + __Pyx_ImportType_CheckSize_Error_3_0_10 = 0, + __Pyx_ImportType_CheckSize_Warn_3_0_10 = 1, + __Pyx_ImportType_CheckSize_Ignore_3_0_10 = 2 }; -static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_5 check_size); +static PyTypeObject *__Pyx_ImportType_3_0_10(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_10 check_size); #endif /* IncludeStructmemberH.proto */ @@ -1943,7 +1963,7 @@ typedef struct { #endif void *defaults; int defaults_pyobjects; - size_t defaults_size; // used by FusedFunction for copying defaults + size_t defaults_size; int flags; PyObject *defaults_tuple; PyObject *defaults_kwdict; @@ -2150,6 +2170,8 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComp /* Module declarations from "cpython.string" */ +/* Module declarations from "libc.stddef" */ + /* Module declarations from "cpython.unicode" */ /* Module declarations from "cpython.pyport" */ @@ -2340,6 +2362,8 @@ typedef struct { #endif #if CYTHON_USE_MODULE_STATE #endif + #if CYTHON_USE_MODULE_STATE + #endif PyObject *__pyx_n_s_AssertionError; PyObject *__pyx_n_s__3; PyObject *__pyx_n_s_aiohttp__websocket; @@ -2593,6 +2617,8 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { #endif #if CYTHON_USE_MODULE_STATE #endif +#if CYTHON_USE_MODULE_STATE +#endif #define __pyx_n_s_AssertionError __pyx_mstate_global->__pyx_n_s_AssertionError #define __pyx_n_s__3 __pyx_mstate_global->__pyx_n_s__3 #define __pyx_n_s_aiohttp__websocket __pyx_mstate_global->__pyx_n_s_aiohttp__websocket @@ -2620,7 +2646,7 @@ static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { /* "cpython/complex.pxd":19 * * @property - * cdef inline double real(self): # <<<<<<<<<<<<<< + * cdef inline double real(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.real * */ @@ -2630,7 +2656,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComp /* "cpython/complex.pxd":20 * @property - * cdef inline double real(self): + * cdef inline double real(self) noexcept: * return self.cval.real # <<<<<<<<<<<<<< * * @property @@ -2641,7 +2667,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComp /* "cpython/complex.pxd":19 * * @property - * cdef inline double real(self): # <<<<<<<<<<<<<< + * cdef inline double real(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.real * */ @@ -2654,7 +2680,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4real_real(PyComp /* "cpython/complex.pxd":23 * * @property - * cdef inline double imag(self): # <<<<<<<<<<<<<< + * cdef inline double imag(self) noexcept: # <<<<<<<<<<<<<< * return self.cval.imag * */ @@ -2664,7 +2690,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComp /* "cpython/complex.pxd":24 * @property - * cdef inline double imag(self): + * cdef inline double imag(self) noexcept: * return self.cval.imag # <<<<<<<<<<<<<< * * # PyTypeObject PyComplex_Type @@ -2675,7 +2701,7 @@ static CYTHON_INLINE double __pyx_f_7cpython_7complex_7complex_4imag_imag(PyComp /* "cpython/complex.pxd":23 * * @property - * cdef inline double imag(self): # <<<<<<<<<<<<<< + * cdef inline double imag(self) noexcept: # <<<<<<<<<<<<<< * 
return self.cval.imag * */ @@ -3501,23 +3527,23 @@ static int __Pyx_modinit_type_import_code(void) { /*--- Type import code ---*/ __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyTypeObject), + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyTypeObject), #elif CYTHON_COMPILING_IN_LIMITED_API - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyTypeObject), + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyTypeObject), #else - sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyHeapTypeObject), + sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyHeapTypeObject), #endif - __Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_ImportType_CheckSize_Warn_3_0_10); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 8, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyBoolObject),__Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(3, 8, __pyx_L1_error) + __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyBoolObject),__Pyx_ImportType_CheckSize_Warn_3_0_10); if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(3, 8, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 15, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType_3_0_5(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_5(PyComplexObject),__Pyx_ImportType_CheckSize_Warn_3_0_5); if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(4, 15, __pyx_L1_error) + __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType_3_0_10(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_10(PyComplexObject),__Pyx_ImportType_CheckSize_Warn_3_0_10); if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(4, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_RefNannyFinishContext(); return 0; @@ -3731,7 +3757,7 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__websocket(PyObject *__pyx_pyinit_ __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) { int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to _websocket pseudovariable */ + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_websocket" pseudovariable */ if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) pystate_addmodule_run = 1; } @@ -4269,14 +4295,14 @@ static CYTHON_INLINE PyObject * 
__Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyO { int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; // error + if (unlikely(eq < 0)) return NULL; return kwvalues[i]; } } - return NULL; // not found (no exception set) + return NULL; } #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -static CYTHON_UNUSED PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); PyObject *dict; dict = PyDict_New(); @@ -4386,7 +4412,7 @@ static int __Pyx_ParseOptionalKeywords( if (*name) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); // transfer ownership of value to values + Py_INCREF(value); Py_DECREF(key); #endif key = NULL; @@ -4405,7 +4431,7 @@ static int __Pyx_ParseOptionalKeywords( && _PyString_Eq(**name, key)) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -4437,7 +4463,7 @@ static int __Pyx_ParseOptionalKeywords( if (cmp == 0) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS - value = NULL; // ownership transferred to values + value = NULL; #endif break; } @@ -4921,10 +4947,10 @@ __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) } /* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType_3_0_5 -#define __PYX_HAVE_RT_ImportType_3_0_5 -static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module_name, const char *class_name, - size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_5 check_size) +#ifndef __PYX_HAVE_RT_ImportType_3_0_10 +#define __PYX_HAVE_RT_ImportType_3_0_10 +static PyTypeObject *__Pyx_ImportType_3_0_10(PyObject *module, const char *module_name, const char *class_name, + size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_10 check_size) { PyObject *result = 0; char warning[200]; @@ -4978,7 +5004,7 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module module_name, class_name, size, basicsize+itemsize); goto bad; } - if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_5 && + if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_10 && ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s size changed, may indicate binary incompatibility. " @@ -4986,7 +5012,7 @@ static PyTypeObject *__Pyx_ImportType_3_0_5(PyObject *module, const char *module module_name, class_name, size, basicsize, basicsize+itemsize); goto bad; } - else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_5 && (size_t)basicsize > size) { + else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_10 && (size_t)basicsize > size) { PyOS_snprintf(warning, sizeof(warning), "%s.%s size changed, may indicate binary incompatibility. 
" "Expected %zd from C header, got %zd from PyObject", @@ -6059,7 +6085,7 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, default: return NULL; } - return ((_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); } static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { @@ -6544,7 +6570,7 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( #else py_code = PyCode_NewEmpty(filename, funcname, py_line); #endif - Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + Py_XDECREF(py_funcname); return py_code; bad: Py_XDECREF(py_funcname); diff --git a/aiohttp/abc.py b/aiohttp/abc.py index ee83899..3fb0240 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -1,5 +1,6 @@ import asyncio import logging +import socket from abc import ABC, abstractmethod from collections.abc import Sized from http.cookies import BaseCookie, Morsel @@ -14,12 +15,12 @@ List, Optional, Tuple, + TypedDict, ) from multidict import CIMultiDict from yarl import URL -from .helpers import get_running_loop from .typedefs import LooseCookies if TYPE_CHECKING: @@ -119,11 +120,35 @@ def __await__(self) -> Generator[Any, None, StreamResponse]: """Execute the view handler.""" +class ResolveResult(TypedDict): + """Resolve result. + + This is the result returned from an AbstractResolver's + resolve method. + + :param hostname: The hostname that was provided. + :param host: The IP address that was resolved. + :param port: The port that was resolved. + :param family: The address family that was resolved. + :param proto: The protocol that was resolved. + :param flags: The flags that were resolved. 
+ """ + + hostname: str + host: str + port: int + family: int + proto: int + flags: int + + class AbstractResolver(ABC): """Abstract DNS resolver.""" @abstractmethod - async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: + async def resolve( + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: """Return IP address for given hostname""" @abstractmethod @@ -144,7 +169,7 @@ class AbstractCookieJar(Sized, IterableBase): """Abstract Cookie Jar.""" def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) + self._loop = loop or asyncio.get_running_loop() @abstractmethod def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index dc1f24f..2fc2fa6 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .client_exceptions import ClientConnectionResetError from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -85,7 +86,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: async def _drain_helper(self) -> None: if not self.connected: - raise ConnectionResetError("Connection lost") + raise ClientConnectionResetError("Connection lost") if not self._paused: return waiter = self._drain_waiter diff --git a/aiohttp/client.py b/aiohttp/client.py index 32d2c3b..93dec00 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -9,7 +9,7 @@ import traceback import warnings from contextlib import suppress -from types import SimpleNamespace, TracebackType +from types import TracebackType from typing import ( TYPE_CHECKING, Any, @@ -27,6 +27,7 @@ Set, Tuple, Type, + TypedDict, TypeVar, Union, ) @@ -38,25 +39,34 @@ from . 
import hdrs, http, payload from .abc import AbstractCookieJar from .client_exceptions import ( - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientResponseError as ClientResponseError, - ClientSSLError as ClientSSLError, - ContentTypeError as ContentTypeError, - InvalidURL as InvalidURL, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TooManyRedirects as TooManyRedirects, - WSServerHandshakeError as WSServerHandshakeError, + ClientConnectionError, + ClientConnectionResetError, + ClientConnectorCertificateError, + ClientConnectorDNSError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponseError, + ClientSSLError, + ConnectionTimeoutError, + ContentTypeError, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TooManyRedirects, + WSServerHandshakeError, ) from .client_reqrep import ( ClientRequest as ClientRequest, @@ -67,6 +77,7 @@ ) from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse from .connector import ( + HTTP_AND_EMPTY_SCHEMA_SET, BaseConnector as BaseConnector, NamedPipeConnector as NamedPipeConnector, TCPConnector as TCPConnector, @@ -80,7 +91,6 @@ TimeoutHandle, ceil_timeout, get_env_proxy_for_url, - get_running_loop, method_must_be_empty_body, sentinel, strip_auth_from_url, @@ -89,12 +99,14 @@ from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse from .streams import FlowControlDataQueue from .tracing import Trace, TraceConfig -from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL __all__ = ( # client_exceptions "ClientConnectionError", + "ClientConnectionResetError", "ClientConnectorCertificateError", + "ClientConnectorDNSError", "ClientConnectorError", "ClientConnectorSSLError", "ClientError", @@ -104,12 +116,19 @@ "ClientProxyConnectionError", "ClientResponseError", "ClientSSLError", + "ConnectionTimeoutError", "ContentTypeError", "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", "ServerConnectionError", "ServerDisconnectedError", "ServerFingerprintMismatch", "ServerTimeoutError", + "SocketTimeoutError", "TooManyRedirects", "WSServerHandshakeError", # client_reqrep @@ -136,6 +155,37 @@ else: SSLContext = None +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + + +class _RequestOptions(TypedDict, total=False): + params: Query + data: Any + json: Any + cookies: Union[LooseCookies, None] + headers: Union[LooseHeaders, None] + skip_auto_headers: Union[Iterable[str], None] + auth: Union[BasicAuth, None] + 
allow_redirects: bool + max_redirects: int + compress: Union[str, bool, None] + chunked: Union[bool, None] + expect100: bool + raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] + read_until_eof: bool + proxy: Union[StrOrURL, None] + proxy_auth: Union[BasicAuth, None] + timeout: "Union[ClientTimeout, _SENTINEL, None]" + ssl: Union[SSLContext, bool, Fingerprint] + server_hostname: Union[str, None] + proxy_headers: Union[LooseHeaders, None] + trace_request_ctx: Union[Mapping[str, Any], None] + read_bufsize: Union[int, None] + auto_decompress: Union[bool, None] + max_line_size: Union[int, None] + max_field_size: Union[int, None] + @attr.s(auto_attribs=True, frozen=True, slots=True) class ClientTimeout: @@ -160,9 +210,12 @@ class ClientTimeout: # 5 Minute default read timeout -DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30) + +# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 +IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) -_RetType = TypeVar("_RetType") +_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse) _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -237,6 +290,21 @@ def __init__( # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. self._connector: Optional[BaseConnector] = None + + if loop is None: + if connector is not None: + loop = connector._loop + + loop = loop or asyncio.get_running_loop() + + if base_url is None or isinstance(base_url, URL): + self._base_url: Optional[URL] = base_url + else: + self._base_url = URL(base_url) + assert ( + self._base_url.origin() == self._base_url + ), "Only absolute URLs without path part are supported" + if timeout is sentinel or timeout is None: self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: @@ -272,19 +340,6 @@ def __init__( "conflict, please setup " "timeout.connect" ) - if loop is None: - if connector is not None: - loop = connector._loop - - loop = get_running_loop(loop) - - if base_url is None or isinstance(base_url, URL): - self._base_url: Optional[URL] = base_url - else: - self._base_url = URL(base_url) - assert ( - self._base_url.origin() == self._base_url - ), "Only absolute URLs without path part are supported" if connector is None: connector = TCPConnector(loop=loop) @@ -369,18 +424,29 @@ def __del__(self, _warnings: Any = warnings) -> None: context["source_traceback"] = self._source_traceback self._loop.call_exception_handler(context) - def request( - self, method: str, url: StrOrURL, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP request.""" - return _RequestContextManager(self._request(method, url, **kwargs)) + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, + method: str, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... 
+ + else: + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) def _build_url(self, str_or_url: StrOrURL) -> URL: url = URL(str_or_url) if self._base_url is None: return url else: - assert not url.is_absolute() and url.path.startswith("/") + assert not url.absolute and url.path.startswith("/") return self._base_url.join(url) async def _request( @@ -388,7 +454,7 @@ async def _request( method: str, str_or_url: StrOrURL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, data: Any = None, json: Any = None, cookies: Optional[LooseCookies] = None, @@ -397,7 +463,7 @@ async def _request( auth: Optional[BasicAuth] = None, allow_redirects: bool = True, max_redirects: int = 10, - compress: Optional[str] = None, + compress: Union[str, bool, None] = None, chunked: Optional[bool] = None, expect100: bool = False, raise_for_status: Union[ @@ -413,7 +479,7 @@ async def _request( ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[SimpleNamespace] = None, + trace_request_ctx: Optional[Mapping[str, Any]] = None, read_bufsize: Optional[int] = None, auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, @@ -446,19 +512,25 @@ async def _request( # Merge with default headers and transform to CIMultiDict headers = self._prepare_headers(headers) - proxy_headers = self._prepare_headers(proxy_headers) try: url = self._build_url(str_or_url) except ValueError as e: - raise InvalidURL(str_or_url) from e + raise InvalidUrlClientError(str_or_url) from e + + assert self._connector is not None + if url.scheme not in self._connector.allowed_protocol_schema_set: + raise NonHttpUrlClientError(url) skip_headers = set(self._skip_auto_headers) if skip_auto_headers is not None: for i in skip_auto_headers: skip_headers.add(istr(i)) - if proxy is not None: + if proxy is None: + proxy_headers = None + else: + proxy_headers = self._prepare_headers(proxy_headers) try: proxy = URL(proxy) except ValueError as e: @@ -505,8 +577,19 @@ async def _request( timer = tm.timer() try: with timer: + # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests + retry_persistent_connection = method in IDEMPOTENT_METHODS while True: url, auth_from_url = strip_auth_from_url(url) + if not url.raw_host: + # NOTE: Bail early, otherwise, causes `InvalidURL` through + # NOTE: `self._request_class()` below. 
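# A short sketch of how calling code can use the finer-grained URL errors this
# patch raises instead of bare ``InvalidURL``/``ValueError``; they are exported
# from the top-level ``aiohttp`` namespace and still derive from ``InvalidURL``
# / ``ClientError``. The ftp:// URL below is only a placeholder with an
# unsupported scheme.
import asyncio
import aiohttp

async def fetch(url: str) -> None:
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(url) as resp:
                print(resp.status)
        except aiohttp.InvalidUrlRedirectClientError as exc:
            print("server redirected to a malformed URL:", exc)
        except aiohttp.InvalidUrlClientError as exc:
            print("request URL is malformed:", exc)
        except aiohttp.NonHttpUrlClientError as exc:
            print("request URL has an unsupported scheme:", exc)

asyncio.run(fetch("ftp://example.com/file"))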
+ err_exc_cls = ( + InvalidUrlRedirectClientError + if redirects + else InvalidUrlClientError + ) + raise err_exc_cls(url) if auth and auth_from_url: raise ValueError( "Cannot combine AUTH argument with " @@ -550,7 +633,7 @@ async def _request( url, params=params, headers=headers, - skip_auto_headers=skip_headers, + skip_auto_headers=skip_headers if skip_headers else None, data=data, cookies=all_cookies, auth=auth, @@ -577,13 +660,12 @@ async def _request( real_timeout.connect, ceil_threshold=real_timeout.ceil_threshold, ): - assert self._connector is not None conn = await self._connector.connect( req, traces=traces, timeout=real_timeout ) except asyncio.TimeoutError as exc: - raise ServerTimeoutError( - "Connection timeout " "to host {}".format(url) + raise ConnectionTimeoutError( + f"Connection timeout to host {url}" ) from exc assert conn.transport is not None @@ -612,6 +694,11 @@ async def _request( except BaseException: conn.close() raise + except (ClientOSError, ServerDisconnectedError): + if retry_persistent_connection: + retry_persistent_connection = False + continue + raise except ClientError: raise except OSError as exc: @@ -659,25 +746,35 @@ async def _request( resp.release() try: - parsed_url = URL( + parsed_redirect_url = URL( r_url, encoded=not self._requote_redirect_url ) - except ValueError as e: - raise InvalidURL(r_url) from e + raise InvalidUrlRedirectClientError( + r_url, + "Server attempted redirecting to a location that does not look like a URL", + ) from e - scheme = parsed_url.scheme - if scheme not in ("http", "https", ""): + scheme = parsed_redirect_url.scheme + if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: resp.close() - raise ValueError("Can redirect only to http or https") + raise NonHttpUrlRedirectClientError(r_url) elif not scheme: - parsed_url = url.join(parsed_url) + parsed_redirect_url = url.join(parsed_redirect_url) - if url.origin() != parsed_url.origin(): + try: + redirect_origin = parsed_redirect_url.origin() + except ValueError as origin_val_err: + raise InvalidUrlRedirectClientError( + parsed_redirect_url, + "Invalid redirect URL origin", + ) from origin_val_err + + if url.origin() != redirect_origin: auth = None headers.pop(hdrs.AUTHORIZATION, None) - url = parsed_url + url = parsed_redirect_url params = {} resp.release() continue @@ -736,11 +833,11 @@ def ws_connect( heartbeat: Optional[float] = None, auth: Optional[BasicAuth] = None, origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, None, Fingerprint] = True, + ssl: Union[SSLContext, bool, Fingerprint] = True, verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, @@ -788,11 +885,11 @@ async def _ws_connect( heartbeat: Optional[float] = None, auth: Optional[BasicAuth] = None, origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, - ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True, + ssl: Union[SSLContext, bool, Fingerprint] = True, verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, @@ -828,6 +925,11 @@ async def _ws_connect( # For the sake of backward compatibility, if user passes in None, convert it to True if ssl is 
None: + warnings.warn( + "ssl=None is deprecated, please use ssl=True", + DeprecationWarning, + stacklevel=2, + ) ssl = True ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) @@ -922,6 +1024,16 @@ async def _ws_connect( assert conn is not None conn_proto = conn.protocol assert conn_proto is not None + + # For WS connection the read_timeout must be either receive_timeout or greater + # None == no timeout, i.e. infinite timeout, so None is the max timeout possible + if receive_timeout is None: + # Reset regardless + conn_proto.read_timeout = receive_timeout + elif conn_proto.read_timeout is not None: + # If read_timeout was set check which wins + conn_proto.read_timeout = max(receive_timeout, conn_proto.read_timeout) + transport = conn.transport assert transport is not None reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( @@ -970,61 +1082,111 @@ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str] added_names.add(key) return result - def get( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP GET request.""" - return _RequestContextManager( - self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) - ) + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def get( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def options( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def head( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def post( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def put( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def patch( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def delete( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... 
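# A usage sketch for the typed verb helpers declared above: each one is a thin
# wrapper around the generic ``request()`` entry point, so the two calls below
# are equivalent (example.com is just a placeholder host).
import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get("https://example.com") as resp:
            print("via get():", resp.status)
        async with session.request("GET", "https://example.com") as resp:
            print("via request():", resp.status)

asyncio.run(main())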
+ + else: + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request( + hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs + ) + ) - def options( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP OPTIONS request.""" - return _RequestContextManager( - self._request( - hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) ) - ) - def head( - self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP HEAD request.""" - return _RequestContextManager( - self._request( - hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) ) - ) - def post( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP POST request.""" - return _RequestContextManager( - self._request(hdrs.METH_POST, url, data=data, **kwargs) - ) + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) - def put( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PUT request.""" - return _RequestContextManager( - self._request(hdrs.METH_PUT, url, data=data, **kwargs) - ) + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) - def patch( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_PATCH, url, data=data, **kwargs) - ) + def patch( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) - def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": - """Perform HTTP DELETE request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, url, **kwargs) + ) async def close(self) -> None: """Close underlying connector. 
@@ -1175,7 +1337,7 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType __slots__ = ("_coro", "_resp") def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro = coro + self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro def send(self, arg: None) -> "asyncio.Future[Any]": return self._coro.send(arg) @@ -1194,12 +1356,8 @@ def __iter__(self) -> Generator[Any, None, _RetType]: return self.__await__() async def __aenter__(self) -> _RetType: - self._resp = await self._coro - return self._resp - - -class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): - __slots__ = () + self._resp: _RetType = await self._coro + return await self._resp.__aenter__() async def __aexit__( self, @@ -1207,25 +1365,11 @@ async def __aexit__( exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: - # We're basing behavior on the exception as it can be caused by - # user code unrelated to the status of the connection. If you - # would like to close a connection you must do that - # explicitly. Otherwise connection error handling should kick in - # and close/recycle the connection as required. - self._resp.release() - await self._resp.wait_for_close() - + await self._resp.__aexit__(exc_type, exc, tb) -class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): - __slots__ = () - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self._resp.close() +_RequestContextManager = _BaseRequestContextManager[ClientResponse] +_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse] class _SessionRequestContextManager: @@ -1265,7 +1409,7 @@ def request( method: str, url: StrOrURL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, data: Any = None, json: Any = None, headers: Optional[LooseHeaders] = None, diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 9b6e442..2cf6cf8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -2,10 +2,11 @@ import asyncio import warnings -from typing import TYPE_CHECKING, Any, Optional, Tuple, Union +from typing import TYPE_CHECKING, Optional, Tuple, Union -from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders +from multidict import MultiMapping + +from .typedefs import StrOrURL try: import ssl @@ -17,18 +18,23 @@ if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo + from .http_parser import RawResponseMessage else: - RequestInfo = ClientResponse = ConnectionKey = None + RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None __all__ = ( "ClientError", "ClientConnectionError", + "ClientConnectionResetError", "ClientOSError", "ClientConnectorError", "ClientProxyConnectionError", "ClientSSLError", + "ClientConnectorDNSError", "ClientConnectorSSLError", "ClientConnectorCertificateError", + "ConnectionTimeoutError", + "SocketTimeoutError", "ServerConnectionError", "ServerTimeoutError", "ServerDisconnectedError", @@ -39,6 +45,11 @@ "ContentTypeError", "ClientPayloadError", "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", ) @@ -64,7 +75,7 @@ def __init__( code: Optional[int] = None, status: Optional[int] = None, message: str = "", - headers: 
Optional[LooseHeaders] = None, + headers: Optional[MultiMapping[str]] = None, ) -> None: self.request_info = request_info if code is not None: @@ -93,7 +104,7 @@ def __str__(self) -> str: return "{}, message={!r}, url={!r}".format( self.status, self.message, - self.request_info.real_url, + str(self.request_info.real_url), ) def __repr__(self) -> str: @@ -150,6 +161,10 @@ class ClientConnectionError(ClientError): """Base class for client socket errors.""" +class ClientConnectionResetError(ClientConnectionError, ConnectionResetError): + """ConnectionResetError""" + + class ClientOSError(ClientConnectionError, OSError): """OSError error.""" @@ -192,6 +207,14 @@ def __str__(self) -> str: __reduce__ = BaseException.__reduce__ +class ClientConnectorDNSError(ClientConnectorError): + """DNS resolution failed during client connection. + + Raised in :class:`aiohttp.connector.TCPConnector` if + DNS resolution fails. + """ + + class ClientProxyConnectionError(ClientConnectorError): """Proxy connection error. @@ -242,6 +265,14 @@ class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): """Server timeout error.""" +class ConnectionTimeoutError(ServerTimeoutError): + """Connection timeout error.""" + + +class SocketTimeoutError(ServerTimeoutError): + """Socket timeout error.""" + + class ServerFingerprintMismatch(ServerConnectionError): """SSL certificate does not match expected fingerprint.""" @@ -271,17 +302,52 @@ class InvalidURL(ClientError, ValueError): # Derive from ValueError for backward compatibility - def __init__(self, url: Any) -> None: + def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: # The type of url is not yarl.URL because the exception can be raised # on URL(url) call - super().__init__(url) + self._url = url + self._description = description + + if description: + super().__init__(url, description) + else: + super().__init__(url) @property - def url(self) -> Any: - return self.args[0] + def url(self) -> StrOrURL: + return self._url + + @property + def description(self) -> "str | None": + return self._description def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.url}>" + return f"<{self.__class__.__name__} {self}>" + + def __str__(self) -> str: + if self._description: + return f"{self._url} - {self._description}" + return str(self._url) + + +class InvalidUrlClientError(InvalidURL): + """Invalid URL client error.""" + + +class RedirectClientError(ClientError): + """Client redirect error.""" + + +class NonHttpUrlClientError(ClientError): + """Non http URL client error.""" + + +class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError): + """Invalid URL redirect client error.""" + + +class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError): + """Non http URL redirect client error.""" class ClientSSLError(ClientConnectorError): diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 723f5aa..8055811 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -7,7 +7,7 @@ ClientOSError, ClientPayloadError, ServerDisconnectedError, - ServerTimeoutError, + SocketTimeoutError, ) from .helpers import ( _EXC_SENTINEL, @@ -50,15 +50,13 @@ def upgraded(self) -> bool: @property def should_close(self) -> bool: - if self._payload is not None and not self._payload.is_eof() or self._upgraded: - return True - return ( self._should_close + or (self._payload is not None and not self._payload.is_eof()) or self._upgraded - or self.exception() is not None + or self._exception is 
not None or self._payload_parser is not None - or len(self) > 0 + or bool(self._buffer) or bool(self._tail) ) @@ -224,8 +222,16 @@ def _reschedule_timeout(self) -> None: def start_timeout(self) -> None: self._reschedule_timeout() + @property + def read_timeout(self) -> Optional[float]: + return self._read_timeout + + @read_timeout.setter + def read_timeout(self, read_timeout: Optional[float]) -> None: + self._read_timeout = read_timeout + def _on_read_timeout(self) -> None: - exc = ServerTimeoutError("Timeout on reading data from socket") + exc = SocketTimeoutError("Timeout on reading data from socket") self.set_exception(exc) if self._payload is not None: set_exception(self._payload, exc) @@ -261,7 +267,15 @@ def data_received(self, data: bytes) -> None: # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(HttpProcessingError(), underlying_exc) + if isinstance(underlying_exc, HttpProcessingError): + exc = HttpProcessingError( + code=underlying_exc.code, + message=underlying_exc.message, + headers=underlying_exc.headers, + ) + else: + exc = HttpProcessingError() + self.set_exception(exc, underlying_exc) return self._upgraded = upgraded diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index afe719d..d536c0a 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL +from yarl import URL, __version__ as yarl_version from . import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -67,6 +67,7 @@ JSONDecoder, LooseCookies, LooseHeaders, + Query, RawHeaders, ) @@ -89,6 +90,10 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") +_YARL_SUPPORTS_HOST_SUBCOMPONENT = tuple(map(int, yarl_version.split(".")[:2])) >= ( + 1, + 13, +) def _gen_default_accept_encoding() -> str: @@ -209,7 +214,7 @@ def _merge_ssl_params( return ssl -@attr.s(auto_attribs=True, slots=True, frozen=True) +@attr.s(auto_attribs=True, slots=True, frozen=True, cache_hash=True) class ConnectionKey: # the key should contain an information about used proxy / TLS # to prevent reusing wrong connections from a pool @@ -245,7 +250,8 @@ class ClientRequest: hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), } - body = b"" + # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. 
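# A standalone sketch of the ``cache_hash=True`` attrs option applied to
# ``ConnectionKey`` above: for a frozen class used as a pool/dict key, attrs
# memoises ``__hash__`` after the first call so repeated lookups do not rehash
# every field. ``_Key`` is an illustrative stand-in, not the real ConnectionKey.
import attr

@attr.s(auto_attribs=True, frozen=True, slots=True, cache_hash=True)
class _Key:
    host: str
    port: int
    is_ssl: bool

pool = {}
key = _Key("example.com", 443, True)
pool[key] = "connection"  # hash computed once here and cached on the instance
assert pool[_Key("example.com", 443, True)] == "connection"  # equal keys still match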
+ body: Any = b"" auth = None response = None @@ -262,14 +268,14 @@ def __init__( method: str, url: URL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, - skip_auto_headers: Iterable[str] = frozenset(), + skip_auto_headers: Optional[Iterable[str]] = None, data: Any = None, cookies: Optional[LooseCookies] = None, auth: Optional[BasicAuth] = None, version: http.HttpVersion = http.HttpVersion11, - compress: Optional[str] = None, + compress: Union[str, bool, None] = None, chunked: Optional[bool] = None, expect100: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, @@ -300,12 +306,9 @@ def __init__( # assert session is not None self._session = cast("ClientSession", session) if params: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) + url = url.extend_query(params) self.original_url = url - self.url = url.with_fragment(None) + self.url = url.with_fragment(None) if url.raw_fragment else url self.method = method.upper() self.chunked = chunked self.compress = compress @@ -352,7 +355,12 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - if writer is not None: + if writer is None: + return + if writer.done(): + # The writer is already done, so we can reset it immediately. + self.__reset_writer() + else: writer.add_done_callback(self.__reset_writer) def is_ssl(self) -> bool: @@ -366,7 +374,7 @@ def ssl(self) -> Union["SSLContext", bool, Fingerprint]: def connection_key(self) -> ConnectionKey: proxy_headers = self.proxy_headers if proxy_headers: - h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + h: Optional[int] = hash(tuple(proxy_headers.items())) else: h = None return ConnectionKey( @@ -401,9 +409,8 @@ def update_host(self, url: URL) -> None: raise InvalidURL(url) # basic auth info - username, password = url.user, url.password - if username: - self.auth = helpers.BasicAuth(username, password or "") + if url.raw_user or url.raw_password: + self.auth = helpers.BasicAuth(url.user or "", url.password or "") def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. @@ -424,33 +431,62 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers: CIMultiDict[str] = CIMultiDict() - # add host - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" - # See https://github.com/aio-libs/aiohttp/issues/3636. - netloc = netloc.rstrip(".") - if self.url.port is not None and not self.url.is_default_port(): - netloc += ":" + str(self.url.port) - self.headers[hdrs.HOST] = netloc - - if headers: - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() # type: ignore[assignment] - - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key.lower() == "host": - self.headers[key] = value - else: - self.headers.add(key, value) + # Build the host header + if _YARL_SUPPORTS_HOST_SUBCOMPONENT: + host = self.url.host_subcomponent + # host_subcomponent is None when the URL is a relative URL. + # but we know we do not have a relative URL here. 
+ assert host is not None + else: + host = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(host): + host = f"[{host}]" + + if host[-1] == ".": + # Remove all trailing dots from the netloc as while + # they are valid FQDNs in DNS, TLS validation fails. + # See https://github.com/aio-libs/aiohttp/issues/3636. + # To avoid string manipulation we only call rstrip if + # the last character is a dot. + host = host.rstrip(".") + + # If explicit port is not None, it means that the port was + # explicitly specified in the URL. In this case we check + # if its not the default port for the scheme and add it to + # the host header. We check explicit_port first because + # yarl caches explicit_port and its likely to already be + # in the cache and non-default port URLs are far less common. + explicit_port = self.url.explicit_port + if explicit_port is not None and not self.url.is_default_port(): + host = f"{host}:{explicit_port}" + + self.headers[hdrs.HOST] = host + + if not headers: + return - def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: - self.skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + if skip_auto_headers is not None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + else: + # Fast path when there are no headers to skip + # which is the most common case. + self.skip_auto_headers = CIMultiDict() + used_headers = self.headers for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: @@ -486,11 +522,12 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" - if data is None: + if not data: + # Don't compress an empty body. 
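# A condensed sketch of the Host-header rule implemented above: append the port
# only when it was given explicitly and is not the scheme default. yarl's
# ``explicit_port`` and ``is_default_port()`` are used the same way in the hunk;
# the IPv6 check here is deliberately simplified.
from yarl import URL

def host_header(url: URL) -> str:
    host = url.raw_host or ""
    if ":" in host:          # bare IPv6 literal -> wrap in brackets
        host = f"[{host}]"
    host = host.rstrip(".")  # trailing dots are valid in DNS but break TLS validation
    if url.explicit_port is not None and not url.is_default_port():
        host = f"{host}:{url.explicit_port}"
    return host

assert host_header(URL("https://example.com/")) == "example.com"
assert host_header(URL("http://example.com:8080/")) == "example.com:8080"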
+ self.compress = None return - enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() - if enc: + if self.headers.get(hdrs.CONTENT_ENCODING): if self.compress: raise ValueError( "compress can not be set " "if Content-Encoding header is set" @@ -566,17 +603,18 @@ def update_body_from_data(self, body: Any) -> None: # copy payload headers assert body.headers - for (key, value) in body.headers.items(): - if key in self.headers: - continue - if key in self.skip_auto_headers: + for key, value in body.headers.items(): + if key in self.headers or key in self.skip_auto_headers: continue self.headers[key] = value def update_expect_continue(self, expect: bool = False) -> None: if expect: self.headers[hdrs.EXPECT] = "100-continue" - elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + elif ( + hdrs.EXPECT in self.headers + and self.headers[hdrs.EXPECT].lower() == "100-continue" + ): expect = True if expect: @@ -588,10 +626,20 @@ def update_proxy( proxy_auth: Optional[BasicAuth], proxy_headers: Optional[LooseHeaders], ) -> None: + self.proxy = proxy + if proxy is None: + self.proxy_auth = None + self.proxy_headers = None + return + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy = proxy self.proxy_auth = proxy_auth + + if proxy_headers is not None and not isinstance( + proxy_headers, (MultiDict, MultiDictProxy) + ): + proxy_headers = CIMultiDict(proxy_headers) self.proxy_headers = proxy_headers def keep_alive(self) -> bool: @@ -614,11 +662,8 @@ async def write_bytes( """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: - try: - await writer.drain() - await self._continue - except asyncio.CancelledError: - return + await writer.drain() + await self._continue protocol = conn.protocol assert protocol is not None @@ -627,10 +672,10 @@ async def write_bytes( await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) # type: ignore[assignment] + self.body = (self.body,) for chunk in self.body: - await writer.write(chunk) # type: ignore[arg-type] + await writer.write(chunk) except OSError as underlying_exc: reraised_exc = underlying_exc @@ -645,7 +690,9 @@ async def write_bytes( set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: - await writer.write_eof() + # Body hasn't been fully sent, so connection can't be reused. + conn.close() + raise except Exception as underlying_exc: set_exception( protocol, @@ -664,33 +711,39 @@ async def send(self, conn: "Connection") -> "ClientResponse": # - not CONNECT proxy must send absolute form URI # - most common is origin form URI if self.method == hdrs.METH_CONNECT: - connect_host = self.url.raw_host - assert connect_host is not None - if helpers.is_ipv6_address(connect_host): - connect_host = f"[{connect_host}]" + if _YARL_SUPPORTS_HOST_SUBCOMPONENT: + connect_host = self.url.host_subcomponent + assert connect_host is not None + else: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = f"[{connect_host}]" path = f"{connect_host}:{self.url.port}" elif self.proxy and not self.is_ssl(): path = str(self.url) else: - path = self.url.raw_path - if self.url.raw_query_string: - path += "?" 
+ self.url.raw_query_string + path = self.url.raw_path_qs protocol = conn.protocol assert protocol is not None writer = StreamWriter( protocol, self.loop, - on_chunk_sent=functools.partial( - self._on_chunk_request_sent, self.method, self.url + on_chunk_sent=( + functools.partial(self._on_chunk_request_sent, self.method, self.url) + if self._traces + else None ), - on_headers_sent=functools.partial( - self._on_headers_request_sent, self.method, self.url + on_headers_sent=( + functools.partial(self._on_headers_request_sent, self.method, self.url) + if self._traces + else None ), ) if self.compress: - writer.enable_compression(self.compress) + writer.enable_compression(self.compress) # type: ignore[arg-type] if self.chunked is not None: writer.enable_chunking() @@ -717,19 +770,26 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.headers[hdrs.CONNECTION] = connection # status + headers - status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( - self.method, path, v=self.version - ) + v = self.version + status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) + coro = self.write_bytes(writer, conn) - self._writer = self.loop.create_task(self.write_bytes(writer, conn)) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to write + # bytes immediately to avoid having to schedule + # the task on the event loop. + task = asyncio.Task(coro, loop=self.loop, eager_start=True) + else: + task = self.loop.create_task(coro) + self._writer = task response_class = self.response_class assert response_class is not None self.response = response_class( self.method, self.original_url, - writer=self._writer, + writer=task, continue100=self._continue, timer=self._timer, request_info=self.request_info, @@ -740,16 +800,23 @@ async def send(self, conn: "Connection") -> "ClientResponse": return self.response async def close(self) -> None: - if self._writer is not None: - with contextlib.suppress(asyncio.CancelledError): - await self._writer + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise def terminate(self) -> None: - if self._writer is not None: + if self.__writer is not None: if not self.loop.is_closed(): - self._writer.cancel() - self._writer.remove_done_callback(self.__reset_writer) - self._writer = None + self.__writer.cancel() + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = None async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: for trace in self._traces: @@ -762,6 +829,9 @@ async def _on_headers_request_sent( await trace.send_request_headers(method, url, headers) +_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") + + class ClientResponse(HeadersMixin): # Some of these attributes are None when created, @@ -782,6 +852,7 @@ class ClientResponse(HeadersMixin): # post-init stage allows to not change ctor signature _closed = True # to allow __del__ for non-initialized properly response _released = False + _in_context = False __writer = None def __init__( @@ -803,9 +874,9 @@ def __init__( self.cookies = SimpleCookie() self._real_url = url - self._url = url.with_fragment(None) - self._body: Any = None - self._writer: Optional[asyncio.Task[None]] = writer + self._url = url.with_fragment(None) if url.raw_fragment else url + self._body: Optional[bytes] = None + self._writer = 
writer self._continue = continue100 # None by default self._closed = True self._history: Tuple[ClientResponse, ...] = () @@ -820,9 +891,9 @@ def __init__( # work after the response has finished reading the body. if session is None: # TODO: Fix session=None in tests (see ClientRequest.__init__). - self._resolve_charset: Callable[ - ["ClientResponse", bytes], str - ] = lambda *_: "utf-8" + self._resolve_charset: Callable[["ClientResponse", bytes], str] = ( + lambda *_: "utf-8" + ) else: self._resolve_charset = session._resolve_charset if loop.get_debug(): @@ -833,14 +904,25 @@ def __reset_writer(self, _: object = None) -> None: @property def _writer(self) -> Optional["asyncio.Task[None]"]: + """The writer task for streaming data. + + _writer is only provided for backwards compatibility + for subclasses that may need to access it. + """ return self.__writer @_writer.setter def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + """Set the writer task for streaming data.""" if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - if writer is not None: + if writer is None: + return + if writer.done(): + # The writer is already done, so we can reset it immediately. + self.__reset_writer() + else: writer.add_done_callback(self.__reset_writer) @reify @@ -1066,7 +1148,12 @@ def raise_for_status(self) -> None: if not self.ok: # reason should always be not None for a started response assert self.reason is not None - self.release() + + # If we're in a context we can rely on __aexit__() to release as the + # exception propagates. + if not self._in_context: + self.release() + raise ClientResponseError( self.request_info, self.history, @@ -1077,31 +1164,47 @@ def raise_for_status(self) -> None: def _release_connection(self) -> None: if self._connection is not None: - if self._writer is None: + if self.__writer is None: self._connection.release() self._connection = None else: - self._writer.add_done_callback(lambda f: self._release_connection()) + self.__writer.add_done_callback(lambda f: self._release_connection()) async def _wait_released(self) -> None: - if self._writer is not None: - await self._writer + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise self._release_connection() def _cleanup_writer(self) -> None: - if self._writer is not None: - self._writer.cancel() + if self.__writer is not None: + self.__writer.cancel() self._session = None def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - set_exception(content, ClientConnectionError("Connection closed")) + set_exception(content, _CONNECTION_CLOSED_EXCEPTION) self._released = True async def wait_for_close(self) -> None: - if self._writer is not None: - await self._writer + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise self.release() async def read(self) -> bytes: @@ -1122,7 +1225,7 @@ async def read(self) -> bytes: protocol = self._connection and self._connection.protocol if protocol is None or not protocol.upgraded: await self._wait_released() # Underlying connection released - return self._body # type: ignore[no-any-return] + return self._body def get_encoding(self) -> str: ctype = self.headers.get(hdrs.CONTENT_TYPE, 
"").lower() @@ -1130,7 +1233,7 @@ def get_encoding(self) -> str: encoding = mimetype.parameters.get("charset") if encoding: - with contextlib.suppress(LookupError): + with contextlib.suppress(LookupError, ValueError): return codecs.lookup(encoding).name if mimetype.type == "application" and ( @@ -1155,9 +1258,7 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> if encoding is None: encoding = self.get_encoding() - return self._body.decode( # type: ignore[no-any-return,union-attr] - encoding, errors=errors - ) + return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] async def json( self, @@ -1176,6 +1277,7 @@ async def json( raise ContentTypeError( self.request_info, self.history, + status=self.status, message=( "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype ), @@ -1192,6 +1294,7 @@ async def json( return loads(stripped.decode(encoding)) async def __aenter__(self) -> "ClientResponse": + self._in_context = True return self async def __aexit__( @@ -1200,6 +1303,7 @@ async def __aexit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: + self._in_context = False # similar to _RequestContextManager, we do not need to check # for exceptions, response object can close connection # if state is broken diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index d9c74a3..c6b5da5 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -2,11 +2,12 @@ import asyncio import sys -from typing import Any, Optional, cast +from types import TracebackType +from typing import Any, Optional, Type, cast -from .client_exceptions import ClientError +from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse -from .helpers import call_later, set_result +from .helpers import calculate_timeout_when, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -62,63 +63,123 @@ def __init__( self._autoping = autoping self._heartbeat = heartbeat self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + self._heartbeat_when: float = 0.0 if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._loop = loop - self._waiting: Optional[asyncio.Future[bool]] = None + self._waiting: bool = False + self._close_wait: Optional[asyncio.Future[None]] = None self._exception: Optional[BaseException] = None self._compress = compress self._client_notakeover = client_notakeover + self._ping_task: Optional[asyncio.Task[None]] = None self._reset_heartbeat() def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - + self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None - if self._heartbeat is not None: - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5, - ) + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + loop = self._loop + assert loop is 
not None + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5, + self._heartbeat_cb = None + loop = self._loop + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat ) + return + + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) + + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. + ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + else: + self._ping_task_done(ping_task) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + if not task.cancelled() and (exc := task.exception()): + self._handle_ping_pong_exception(exc) + self._ping_task = None def _pong_not_received(self) -> None: - if not self._closed: - self._closed = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = asyncio.TimeoutError() - self._response.close() + self._handle_ping_pong_exception(ServerTimeoutError()) + + def _handle_ping_pong_exception(self, exc: BaseException) -> None: + """Handle exceptions raised during ping/pong processing.""" + if self._closed: + return + self._set_closed() + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + if self._waiting and not self._closing: + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) + + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + + def _set_closing(self) -> None: + """Set the connection to closing. + + Cancel any heartbeat timers and set the closing flag. 
+ """ + self._closing = True + self._cancel_heartbeat() @property def closed(self) -> bool: @@ -181,14 +242,15 @@ async def send_json( async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: # we need to break `receive()` cycle first, # `close()` may be called from different task - if self._waiting is not None and not self._closing: - self._closing = True + if self._waiting and not self._closing: + assert self._loop is not None + self._close_wait = self._loop.create_future() + self._set_closing() self._reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting + await self._close_wait if not self._closed: - self._cancel_heartbeat() - self._closed = True + self._set_closed() try: await self._writer.close(code, message) except asyncio.CancelledError: @@ -219,7 +281,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._response.close() return True - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: self._close_code = msg.data self._response.close() return True @@ -227,8 +289,10 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo return False async def receive(self, timeout: Optional[float] = None) -> WSMessage: + receive_timeout = timeout or self._receive_timeout + while True: - if self._waiting is not None: + if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") if self._closed: @@ -238,15 +302,22 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WS_CLOSED_MESSAGE try: - self._waiting = self._loop.create_future() + self._waiting = True try: - async with async_timeout.timeout(timeout or self._receive_timeout): + if receive_timeout: + # Entering the context manager and creating + # Timeout() object can take almost 50% of the + # run time in this loop so we avoid it if + # there is no read timeout. 
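# A minimal sketch of the optimisation described in the comment above: only
# enter a timeout context when a timeout is actually configured, since creating
# and entering one on every receive() call has measurable overhead.
# ``read_line`` is an illustrative helper, not aiohttp API.
import asyncio
from typing import Optional

import async_timeout

async def read_line(reader: asyncio.StreamReader, timeout: Optional[float]) -> bytes:
    if timeout:
        async with async_timeout.timeout(timeout):
            return await reader.readline()
    return await reader.readline()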
+ async with async_timeout.timeout(receive_timeout): + msg = await self._reader.read() + else: msg = await self._reader.read() self._reset_heartbeat() finally: - waiter = self._waiting - self._waiting = None - set_result(waiter, True) + self._waiting = False + if self._close_wait: + set_result(self._close_wait, None) except (asyncio.CancelledError, asyncio.TimeoutError): self._close_code = WSCloseCode.ABNORMAL_CLOSURE raise @@ -255,7 +326,8 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.CLOSED, None, None) except ClientError: - self._closed = True + # Likely ServerDisconnectedError when connection is lost + self._set_closed() self._close_code = WSCloseCode.ABNORMAL_CLOSURE return WS_CLOSED_MESSAGE except WebSocketError as exc: @@ -264,35 +336,35 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True + self._set_closing() self._close_code = WSCloseCode.ABNORMAL_CLOSURE await self.close() return WSMessage(WSMsgType.ERROR, exc, None) - if msg.type == WSMsgType.CLOSE: - self._closing = True + if msg.type is WSMsgType.CLOSE: + self._set_closing() self._close_code = msg.data if not self._closed and self._autoclose: await self.close() - elif msg.type == WSMsgType.CLOSING: - self._closing = True - elif msg.type == WSMsgType.PING and self._autoping: + elif msg.type is WSMsgType.CLOSING: + self._set_closing() + elif msg.type is WSMsgType.PING and self._autoping: await self.pong(msg.data) continue - elif msg.type == WSMsgType.PONG and self._autoping: + elif msg.type is WSMsgType.PONG and self._autoping: continue return msg async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: + if msg.type is not WSMsgType.TEXT: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") return cast(str, msg.data) async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: + if msg.type is not WSMsgType.BINARY: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") return cast(bytes, msg.data) @@ -313,3 +385,14 @@ async def __anext__(self) -> WSMessage: if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): raise StopAsyncIteration return msg + + async def __aenter__(self) -> "ClientWebSocketResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index 9631d37..ab4a2f1 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -50,9 +50,11 @@ def __init__( max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ): super().__init__( - mode=encoding_to_mode(encoding, suppress_deflate_header) - if wbits is None - else wbits, + mode=( + encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits + ), executor=executor, max_sync_chunk_size=max_sync_chunk_size, ) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index f95ebe8..1bdd14b 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1,6 +1,7 @@ import asyncio import functools import random +import socket import sys import traceback import warnings @@ -8,7 +9,7 @@ from 
contextlib import suppress from http import HTTPStatus from http.cookies import SimpleCookie -from itertools import cycle, islice +from itertools import chain, cycle, islice from time import monotonic from types import TracebackType from typing import ( @@ -22,6 +23,7 @@ List, Literal, Optional, + Sequence, Set, Tuple, Type, @@ -29,13 +31,15 @@ cast, ) +import aiohappyeyeballs import attr from . import hdrs, helpers -from .abc import AbstractResolver +from .abc import AbstractResolver, ResolveResult from .client_exceptions import ( ClientConnectionError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientHttpProxyError, @@ -47,8 +51,14 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel -from .locks import EventResultOrError +from .helpers import ( + ceil_timeout, + is_ip_address, + noop, + sentinel, + set_exception, + set_result, +) from .resolver import DefaultResolver try: @@ -60,6 +70,14 @@ SSLContext = object # type: ignore[misc,assignment] +EMPTY_SCHEMA_SET = frozenset({""}) +HTTP_SCHEMA_SET = frozenset({"http", "https"}) +WS_SCHEMA_SET = frozenset({"ws", "wss"}) + +HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET +HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET + + __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") @@ -208,6 +226,8 @@ class BaseConnector: # abort transport after 2 seconds (cleanup broken connections) _cleanup_closed_period = 2.0 + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET + def __init__( self, *, @@ -229,7 +249,7 @@ def __init__( if keepalive_timeout is sentinel: keepalive_timeout = 15.0 - loop = get_running_loop(loop) + loop = loop or asyncio.get_running_loop() self._timeout_ceil_threshold = timeout_ceil_threshold self._closed = False @@ -240,14 +260,16 @@ def __init__( self._limit = limit self._limit_per_host = limit_per_host self._acquired: Set[ResponseHandler] = set() - self._acquired_per_host: DefaultDict[ - ConnectionKey, Set[ResponseHandler] - ] = defaultdict(set) + self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = ( + defaultdict(set) + ) self._keepalive_timeout = cast(float, keepalive_timeout) self._force_close = force_close # {host_key: FIFO list of waiters} - self._waiters = defaultdict(deque) # type: ignore[var-annotated] + self._waiters: DefaultDict[ConnectionKey, deque[asyncio.Future[None]]] = ( + defaultdict(deque) + ) self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) @@ -377,13 +399,10 @@ def _cleanup(self) -> None: def _drop_acquired_per_host( self, key: "ConnectionKey", val: ResponseHandler ) -> None: - acquired_per_host = self._acquired_per_host - if key not in acquired_per_host: - return - conns = acquired_per_host[key] - conns.remove(val) - if not conns: - del self._acquired_per_host[key] + if conns := self._acquired_per_host.get(key): + conns.remove(val) + if not conns: + del self._acquired_per_host[key] def _cleanup_closed(self) -> None: """Double confirmation for transport close. @@ -501,7 +520,7 @@ async def connect( # Wait if there are no available connections or if there are/were # waiters (i.e. 
don't steal connection from a waiter about to wake up) if available <= 0 or key in self._waiters: - fut = self._loop.create_future() + fut: asyncio.Future[None] = self._loop.create_future() # This connection will now count towards the limit. self._waiters[key].append(fut) @@ -668,20 +687,21 @@ def _release( if key.is_ssl and not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transport) - else: - conns = self._conns.get(key) - if conns is None: - conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) - - if self._cleanup_handle is None: - self._cleanup_handle = helpers.weakref_handle( - self, - "_cleanup", - self._keepalive_timeout, - self._loop, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) + return + + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + self._keepalive_timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) async def _create_connection( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" @@ -691,14 +711,14 @@ async def _create_connection( class _DNSCacheTable: def __init__(self, ttl: Optional[float] = None) -> None: - self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {} self._timestamps: Dict[Tuple[str, int], float] = {} self._ttl = ttl def __contains__(self, host: object) -> bool: return host in self._addrs_rr - def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None: self._addrs_rr[key] = (cycle(addrs), len(addrs)) if self._ttl is not None: @@ -714,7 +734,7 @@ def clear(self) -> None: self._addrs_rr.clear() self._timestamps.clear() - def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]: loop, length = self._addrs_rr[key] addrs = list(islice(loop, length)) # Consume one more element to shift internal state of `cycle` @@ -728,6 +748,35 @@ def expired(self, key: Tuple[str, int]) -> bool: return self._timestamps[key] + self._ttl < monotonic() +def _make_ssl_context(verified: bool) -> SSLContext: + """Create SSL context. + + This method is not async-friendly and should be called from a thread + because it will load certificates from disk and do other blocking I/O. + """ + if ssl is None: + # No ssl support + return None + if verified: + return ssl.create_default_context() + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + +# The default SSLContext objects are created at import time +# since they do blocking I/O to load certificates from disk, +# and imports should always be done before the event loop starts +# or in a thread. +_SSL_CONTEXT_VERIFIED = _make_ssl_context(True) +_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False) + + class TCPConnector(BaseConnector): """TCP connector. 
@@ -735,7 +784,7 @@ class TCPConnector(BaseConnector): fingerprint - Pass the binary sha256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. See also - https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning resolver - Enable DNS lookups and use this resolver use_dns_cache - Use memory cache for DNS lookups. @@ -750,9 +799,15 @@ class TCPConnector(BaseConnector): limit_per_host - Number of simultaneous connections to one host. enable_cleanup_closed - Enables clean-up closed ssl transports. Disabled by default. + happy_eyeballs_delay - This is the “Connection Attempt Delay” + as defined in RFC 8305. To disable + the happy eyeballs algorithm, set to None. + interleave - “First Address Family Count” as defined in RFC 8305 loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + def __init__( self, *, @@ -760,7 +815,7 @@ def __init__( fingerprint: Optional[bytes] = None, use_dns_cache: bool = True, ttl_dns_cache: Optional[int] = 10, - family: int = 0, + family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC, ssl_context: Optional[SSLContext] = None, ssl: Union[bool, Fingerprint, SSLContext] = True, local_addr: Optional[Tuple[str, int]] = None, @@ -772,6 +827,8 @@ def __init__( enable_cleanup_closed: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, timeout_ceil_threshold: float = 5, + happy_eyeballs_delay: Optional[float] = 0.25, + interleave: Optional[int] = None, ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -790,14 +847,22 @@ def __init__( self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) - self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._throttle_dns_futures: Dict[ + Tuple[str, int], Set["asyncio.Future[None]"] + ] = {} self._family = family - self._local_addr = local_addr + self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) + self._happy_eyeballs_delay = happy_eyeballs_delay + self._interleave = interleave + self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" - for ev in self._throttle_dns_events.values(): - ev.cancel() + for fut in chain.from_iterable(self._throttle_dns_futures.values()): + fut.cancel() + + for t in self._resolve_host_tasks: + t.cancel() return super().close() @@ -823,8 +888,8 @@ def clear_dns_cache( self._cached_hosts.clear() async def _resolve_host( - self, host: str, port: int, traces: Optional[List["Trace"]] = None - ) -> List[Dict[str, Any]]: + self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None + ) -> List[ResolveResult]: """Resolve host and return list of addresses.""" if is_ip_address(host): return [ @@ -862,25 +927,51 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + futures: Set["asyncio.Future[None]"] # # If multiple connectors are resolving the same host, we wait # for the first one to resolve and then use the result for all of them. - # We use a throttle event to ensure that we only resolve the host once + # We use a throttle to ensure that we only resolve the host once # and then use the result for all the waiters. 
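# A self-contained sketch of the single-flight DNS throttle described above:
# the first caller for a host performs the lookup, concurrent callers park on a
# future and then reuse the shared cache; failures are propagated to every
# waiter via ``set_exception``. ``fake_resolve`` stands in for the real resolver.
import asyncio
from typing import Dict, List, Set

_cache: Dict[str, List[str]] = {}
_waiters: Dict[str, Set["asyncio.Future[None]"]] = {}

async def fake_resolve(host: str) -> List[str]:
    await asyncio.sleep(0.01)  # pretend this is a DNS round trip
    return ["192.0.2.1"]

async def resolve_once(host: str) -> List[str]:
    if host in _waiters:  # a lookup for this host is already in flight
        futures = _waiters[host]  # grab the waiter set before any await
        fut: "asyncio.Future[None]" = asyncio.get_running_loop().create_future()
        futures.add(fut)
        try:
            await fut  # re-raises if the in-flight lookup failed
        finally:
            futures.discard(fut)
        return _cache[host]
    _waiters[host] = waiters = set()
    try:
        _cache[host] = addrs = await fake_resolve(host)
        for fut in waiters:
            if not fut.done():
                fut.set_result(None)
        return addrs
    except BaseException as exc:
        for fut in waiters:
            if not fut.done():
                fut.set_exception(exc)
        raise
    finally:
        del _waiters[host]

async def main() -> None:
    print(await asyncio.gather(*(resolve_once("example.com") for _ in range(3))))

asyncio.run(main())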
# + if key in self._throttle_dns_futures: + # get futures early, before any await (#4014) + futures = self._throttle_dns_futures[key] + future: asyncio.Future[None] = self._loop.create_future() + futures.add(future) + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + try: + await future + finally: + futures.discard(future) + return self._cached_hosts.next_addrs(key) + + # update dict early, before any await (#4014) + self._throttle_dns_futures[key] = futures = set() # In this case we need to create a task to ensure that we can shield # the task from cancellation as cancelling this lookup should not cancel # the underlying lookup or else the cancel event will get broadcast to # all the waiters across all connections. # - resolved_host_task = asyncio.create_task( - self._resolve_host_with_throttle(key, host, port, traces) - ) + coro = self._resolve_host_with_throttle(key, host, port, futures, traces) + loop = asyncio.get_running_loop() + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send immediately + resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + resolved_host_task = loop.create_task(coro) + + if not resolved_host_task.done(): + self._resolve_host_tasks.add(resolved_host_task) + resolved_host_task.add_done_callback(self._resolve_host_tasks.discard) + try: return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None: with suppress(Exception, asyncio.CancelledError): fut.result() @@ -892,42 +983,39 @@ async def _resolve_host_with_throttle( key: Tuple[str, int], host: str, port: int, - traces: Optional[List["Trace"]], - ) -> List[Dict[str, Any]]: - """Resolve host with a dns events throttle.""" - if key in self._throttle_dns_events: - # get event early, before any await (#4014) - event = self._throttle_dns_events[key] + futures: Set["asyncio.Future[None]"], + traces: Optional[Sequence["Trace"]], + ) -> List[ResolveResult]: + """Resolve host and set result for all waiters. + + This method must be run in a task and shielded from cancellation + to avoid cancelling the underlying lookup. + """ + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: if traces: for trace in traces: - await trace.send_dns_cache_hit(host) - await event.wait() - else: - # update dict early, before any await (#4014) - self._throttle_dns_events[key] = EventResultOrError(self._loop) + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) if traces: for trace in traces: - await trace.send_dns_cache_miss(host) - try: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - addrs = await self._resolver.resolve(host, port, family=self._family) - if traces: - for trace in traces: - await trace.send_dns_resolvehost_end(host) + await trace.send_dns_resolvehost_end(host) - self._cached_hosts.add(key, addrs) - self._throttle_dns_events[key].set() - except BaseException as e: - # any DNS exception, independently of the implementation - # is set for the waiters to raise the same exception. 
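# --- Illustrative aside: the task-shielding pattern used above, reduced to a
# sketch.  The lookup runs in its own task so cancelling one caller cannot
# cancel the shared work, and on Python 3.12+ the task is started eagerly so
# it can complete without an extra trip through the event loop.  `do_lookup`
# and `background` are hypothetical names for this example.
import asyncio
import sys
from contextlib import suppress
from typing import Set


async def do_lookup() -> str:
    await asyncio.sleep(0.1)
    return "resolved"


async def shielded_lookup(background: Set["asyncio.Task[str]"]) -> str:
    loop = asyncio.get_running_loop()
    coro = do_lookup()
    if sys.version_info >= (3, 12):
        # eager_start runs the coroutine immediately, up to its first await.
        task = asyncio.Task(coro, loop=loop, eager_start=True)
    else:
        task = loop.create_task(coro)
    if not task.done():
        # Hold a strong reference so the task is not garbage collected early.
        background.add(task)
        task.add_done_callback(background.discard)
    try:
        return await asyncio.shield(task)
    except asyncio.CancelledError:
        # The caller was cancelled; the lookup keeps running in the background,
        # so make sure its eventual exception (if any) is consumed.
        def drop_exception(fut: "asyncio.Future[str]") -> None:
            with suppress(Exception, asyncio.CancelledError):
                fut.result()

        task.add_done_callback(drop_exception)
        raise


async def main() -> None:
    background: Set["asyncio.Task[str]"] = set()
    print(await shielded_lookup(background))


asyncio.run(main())
# --- end aside ---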
- self._throttle_dns_events[key].set(exc=e) - raise - finally: - self._throttle_dns_events.pop(key) + self._cached_hosts.add(key, addrs) + for fut in futures: + set_result(fut, None) + except BaseException as e: + # any DNS exception is set for the waiters to raise the same exception. + # This coro is always run in task that is shielded from cancellation so + # we should never be propagating cancellation here. + for fut in futures: + set_exception(fut, e) + raise + finally: + self._throttle_dns_futures.pop(key) return self._cached_hosts.next_addrs(key) @@ -945,29 +1033,6 @@ async def _create_connection( return proto - @staticmethod - @functools.lru_cache(None) - def _make_ssl_context(verified: bool) -> SSLContext: - if verified: - return ssl.create_default_context() - else: - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - try: - sslcontext.options |= ssl.OP_NO_COMPRESSION - except AttributeError as attr_err: - warnings.warn( - "{!s}: The Python interpreter is compiled " - "against OpenSSL < 1.0.0. Ref: " - "https://docs.python.org/3/library/ssl.html" - "#ssl.OP_NO_COMPRESSION".format(attr_err), - ) - sslcontext.set_default_verify_paths() - return sslcontext - def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: """Logic to get the correct SSL context @@ -982,25 +1047,25 @@ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: 3. if verify_ssl is False in req, generate a SSL context that won't verify """ - if req.is_ssl(): - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - sslcontext = req.ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - sslcontext = self._ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - return self._make_ssl_context(True) - else: + if not req.is_ssl(): return None + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return _SSL_CONTEXT_UNVERIFIED + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return _SSL_CONTEXT_UNVERIFIED + return _SSL_CONTEXT_VERIFIED + def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: ret = req.ssl if isinstance(ret, Fingerprint): @@ -1011,6 +1076,36 @@ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: return None async def _wrap_create_connection( + self, + *args: Any, + addr_infos: List[aiohappyeyeballs.AddrInfoType], + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + sock = await aiohappyeyeballs.start_connection( + addr_infos=addr_infos, + local_addr_infos=self._local_addr_infos, + happy_eyeballs_delay=self._happy_eyeballs_delay, + interleave=self._interleave, + loop=self._loop, + ) + return await 
self._loop.create_connection(*args, **kwargs, sock=sock) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + async def _wrap_existing_connection( self, *args: Any, req: ClientRequest, @@ -1121,13 +1216,11 @@ async def _start_tls_connection( ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: """Wrap the raw TCP transport with TLS.""" tls_proto = self._factory() # Create a brand new proto for TLS - - # Safety of the `cast()` call here is based on the fact that - # internally `_get_ssl_context()` only returns `None` when - # `req.is_ssl()` evaluates to `False` which is never gonna happen - # in this code path. Of course, it's rather fragile - # maintainability-wise but this is to be solved separately. - sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + sslcontext = self._get_ssl_context(req) + if TYPE_CHECKING: + # _start_tls_connection is unreachable in the current code path + # if sslcontext is None. + assert sslcontext is not None try: async with ceil_timeout( @@ -1176,6 +1269,27 @@ async def _start_tls_connection( return tls_transport, tls_proto + def _convert_hosts_to_addr_infos( + self, hosts: List[ResolveResult] + ) -> List[aiohappyeyeballs.AddrInfoType]: + """Converts the list of hosts to a list of addr_infos. + + The list of hosts is the result of a DNS lookup. The list of + addr_infos is the result of a call to `socket.getaddrinfo()`. + """ + addr_infos: List[aiohappyeyeballs.AddrInfoType] = [] + for hinfo in hosts: + host = hinfo["host"] + is_ipv6 = ":" in host + family = socket.AF_INET6 if is_ipv6 else socket.AF_INET + if self._family and self._family != family: + continue + addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"]) + addr_infos.append( + (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr) + ) + return addr_infos + async def _create_direct_connection( self, req: ClientRequest, @@ -1206,39 +1320,30 @@ async def _create_direct_connection( raise # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself - raise ClientConnectorError(req.connection_key, exc) from exc + raise ClientConnectorDNSError(req.connection_key, exc) from exc last_exc: Optional[Exception] = None - - for hinfo in hosts: - host = hinfo["host"] - port = hinfo["port"] - + addr_infos = self._convert_hosts_to_addr_infos(hosts) + while addr_infos: # Strip trailing dots, certificates contain FQDN without dots. 
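# --- Illustrative aside: the Happy Eyeballs hand-off shown above, as a
# standalone sketch.  Resolved (host, port) pairs are turned into
# getaddrinfo()-style 5-tuples and given to aiohappyeyeballs.start_connection(),
# which races candidates per RFC 8305 and returns one connected socket.  The
# helper name, the placeholder address and the 0.25s delay are example values;
# the call mirrors the one in this patch but is not aiohttp's public API.
import asyncio
import socket
from typing import List, Tuple

import aiohappyeyeballs  # third-party package this patch depends on


def to_addr_infos(hosts: List[Tuple[str, int]]) -> List[aiohappyeyeballs.AddrInfoType]:
    infos: List[aiohappyeyeballs.AddrInfoType] = []
    for host, port in hosts:
        is_ipv6 = ":" in host
        family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
        addr = (host, port, 0, 0) if is_ipv6 else (host, port)
        infos.append((family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr))
    return infos


async def connect(hosts: List[Tuple[str, int]]) -> asyncio.Transport:
    loop = asyncio.get_running_loop()
    sock = await aiohappyeyeballs.start_connection(
        addr_infos=to_addr_infos(hosts),
        happy_eyeballs_delay=0.25,  # RFC 8305 "Connection Attempt Delay"
    )
    transport, _ = await loop.create_connection(asyncio.Protocol, sock=sock)
    return transport


async def main() -> None:
    # 192.0.2.1 is a non-routable documentation address; substitute addresses
    # you actually resolved before running this.
    transport = await connect([("192.0.2.1", 80)])
    transport.close()


# asyncio.run(main())  # needs network access and real resolved addresses
# --- end aside ---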
# See https://github.com/aio-libs/aiohttp/issues/3636 server_hostname = ( - (req.server_hostname or hinfo["hostname"]).rstrip(".") - if sslcontext - else None + (req.server_hostname or host).rstrip(".") if sslcontext else None ) try: transp, proto = await self._wrap_create_connection( self._factory, - host, - port, timeout=timeout, ssl=sslcontext, - family=hinfo["family"], - proto=hinfo["proto"], - flags=hinfo["flags"], + addr_infos=addr_infos, server_hostname=server_hostname, - local_addr=self._local_addr, req=req, client_error=client_error, ) - except ClientConnectorError as exc: + except (ClientConnectorError, asyncio.TimeoutError) as exc: last_exc = exc + aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave) continue if req.is_ssl() and fingerprint: @@ -1249,6 +1354,10 @@ async def _create_direct_connection( if not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transp) last_exc = exc + # Remove the bad peer from the list of addr_infos + sock: socket.socket = transp.get_extra_info("socket") + bad_peer = sock.getpeername() + aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer) continue return transp, proto @@ -1367,7 +1476,7 @@ async def _create_proxy_connection( if not runtime_has_start_tls: # HTTP proxy with support for upgrade to HTTPS sslcontext = self._get_ssl_context(req) - return await self._wrap_create_connection( + return await self._wrap_existing_connection( self._factory, timeout=timeout, ssl=sslcontext, @@ -1401,6 +1510,8 @@ class UnixConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"}) + def __init__( self, path: str, @@ -1457,6 +1568,8 @@ class NamedPipeConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"}) + def __init__( self, path: str, diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index a348f11..c78d5fa 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -2,6 +2,8 @@ import calendar import contextlib import datetime +import heapq +import itertools import os # noqa import pathlib import pickle @@ -9,8 +11,7 @@ import time from collections import defaultdict from http.cookies import BaseCookie, Morsel, SimpleCookie -from math import ceil -from typing import ( # noqa +from typing import ( DefaultDict, Dict, Iterable, @@ -35,6 +36,15 @@ CookieItem = Union[str, "Morsel[str]"] +# We cache these string methods here as their use is in performance critical code. +_FORMAT_PATH = "{}/{}".format +_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format + +# The minimum number of scheduled cookie expirations before we start cleaning up +# the expiration heap. This is a performance optimization to avoid cleaning up the +# heap too often when there are only a few scheduled expirations. 
+_MIN_SCHEDULED_COOKIE_EXPIRATION = 100 + class CookieJar(AbstractCookieJar): """Implements cookie storage adhering to RFC 6265.""" @@ -85,6 +95,9 @@ def __init__( self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( SimpleCookie ) + self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = ( + defaultdict(dict) + ) self._host_only_cookies: Set[Tuple[str, str]] = set() self._unsafe = unsafe self._quote_cookie = quote_cookie @@ -100,7 +113,7 @@ def __init__( for url in treat_as_secure_origin ] self._treat_as_secure_origin = treat_as_secure_origin - self._next_expiration: float = ceil(time.time()) + self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = [] self._expirations: Dict[Tuple[str, str, str], float] = {} def save(self, file_path: PathLike) -> None: @@ -115,34 +128,26 @@ def load(self, file_path: PathLike) -> None: def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: if predicate is None: - self._next_expiration = ceil(time.time()) + self._expire_heap.clear() self._cookies.clear() + self._morsel_cache.clear() self._host_only_cookies.clear() self._expirations.clear() return - to_del = [] now = time.time() - for (domain, path), cookie in self._cookies.items(): - for name, morsel in cookie.items(): - key = (domain, path, name) - if ( - key in self._expirations and self._expirations[key] <= now - ) or predicate(morsel): - to_del.append(key) - - for domain, path, name in to_del: - self._host_only_cookies.discard((domain, name)) - key = (domain, path, name) - if key in self._expirations: - del self._expirations[(domain, path, name)] - self._cookies[(domain, path)].pop(name, None) - - self._next_expiration = ( - min(*self._expirations.values(), self.SUB_MAX_TIME) + 1 - if self._expirations - else self.MAX_TIME - ) + to_del = [ + key + for (domain, path), cookie in self._cookies.items() + for name, morsel in cookie.items() + if ( + (key := (domain, path, name)) in self._expirations + and self._expirations[key] <= now + ) + or predicate(morsel) + ] + if to_del: + self._delete_cookies(to_del) def clear_domain(self, domain: str) -> None: self.clear(lambda x: self._is_domain_match(domain, x["domain"])) @@ -153,14 +158,70 @@ def __iter__(self) -> "Iterator[Morsel[str]]": yield from val.values() def __len__(self) -> int: - return sum(1 for i in self) + """Return number of cookies. + + This function does not iterate self to avoid unnecessary expiration + checks. + """ + return sum(len(cookie.values()) for cookie in self._cookies.values()) def _do_expiration(self) -> None: - self.clear(lambda x: False) + """Remove expired cookies.""" + if not (expire_heap_len := len(self._expire_heap)): + return + + # If the expiration heap grows larger than the number expirations + # times two, we clean it up to avoid keeping expired entries in + # the heap and consuming memory. We guard this with a minimum + # threshold to avoid cleaning up the heap too often when there are + # only a few scheduled expirations. + if ( + expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION + and expire_heap_len > len(self._expirations) * 2 + ): + # Remove any expired entries from the expiration heap + # that do not match the expiration time in the expirations + # as it means the cookie has been re-added to the heap + # with a different expiration time. 
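# --- Illustrative aside: the heap-based expiration bookkeeping above, as a
# tiny standalone model.  (when, key) pairs sit on a min-heap, `expirations`
# records the latest scheduled time per key, and superseded heap entries are
# simply skipped when they surface.  Not aiohttp's CookieJar API.
import heapq
import time
from typing import Dict, List, Tuple

Key = Tuple[str, str, str]  # (domain, path, name)

expire_heap: List[Tuple[float, Key]] = []
expirations: Dict[Key, float] = {}


def schedule(key: Key, when: float) -> None:
    if expirations.get(key) == when:
        return  # avoid pushing duplicate heap entries
    heapq.heappush(expire_heap, (when, key))
    expirations[key] = when


def pop_expired(now: float) -> List[Key]:
    expired: List[Key] = []
    while expire_heap and expire_heap[0][0] <= now:
        when, key = heapq.heappop(expire_heap)
        # Only honour entries that still match the latest scheduled time;
        # anything else was re-scheduled later and is stale.
        if expirations.get(key) == when:
            del expirations[key]
            expired.append(key)
    return expired


now = time.time()
schedule(("example.com", "/", "sid"), now + 1)
schedule(("example.com", "/", "sid"), now + 60)  # re-schedule; the +1 entry goes stale
print(pop_expired(now + 5))    # [] -- only the +60 entry is still live
print(pop_expired(now + 120))  # [('example.com', '/', 'sid')]
# --- end aside ---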
+ self._expire_heap = [ + entry + for entry in self._expire_heap + if self._expirations.get(entry[1]) == entry[0] + ] + heapq.heapify(self._expire_heap) + + now = time.time() + to_del: List[Tuple[str, str, str]] = [] + # Find any expired cookies and add them to the to-delete list + while self._expire_heap: + when, cookie_key = self._expire_heap[0] + if when > now: + break + heapq.heappop(self._expire_heap) + # Check if the cookie hasn't been re-added to the heap + # with a different expiration time as it will be removed + # later when it reaches the top of the heap and its + # expiration time is met. + if self._expirations.get(cookie_key) == when: + to_del.append(cookie_key) + + if to_del: + self._delete_cookies(to_del) + + def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + self._cookies[(domain, path)].pop(name, None) + self._morsel_cache[(domain, path)].pop(name, None) + self._expirations.pop((domain, path, name), None) def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: - self._next_expiration = min(self._next_expiration, when) - self._expirations[(domain, path, name)] = when + cookie_key = (domain, path, name) + if self._expirations.get(cookie_key) == when: + # Avoid adding duplicates to the heap + return + heapq.heappush(self._expire_heap, (when, cookie_key)) + self._expirations[cookie_key] = when def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: """Update cookies.""" @@ -182,7 +243,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No domain = cookie["domain"] # ignore domains with trailing dots - if domain.endswith("."): + if domain and domain[-1] == ".": domain = "" del cookie["domain"] @@ -192,7 +253,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No self._host_only_cookies.add((hostname, name)) domain = cookie["domain"] = hostname - if domain.startswith("."): + if domain and domain[0] == ".": # Remove leading dot domain = domain[1:] cookie["domain"] = domain @@ -202,7 +263,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No continue path = cookie["path"] - if not path or not path.startswith("/"): + if not path or path[0] != "/": # Set the cookie's path to the response path path = response_url.path if not path.startswith("/"): @@ -211,9 +272,9 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No # Cut everything from the last slash to the end path = "/" + path[1 : path.rfind("/")] cookie["path"] = path + path = path.rstrip("/") - max_age = cookie["max-age"] - if max_age: + if max_age := cookie["max-age"]: try: delta_seconds = int(max_age) max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) @@ -221,16 +282,18 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No except ValueError: cookie["max-age"] = "" - else: - expires = cookie["expires"] - if expires: - expire_time = self._parse_date(expires) - if expire_time: - self._expire_cookie(expire_time, domain, path, name) - else: - cookie["expires"] = "" + elif expires := cookie["expires"]: + if expire_time := self._parse_date(expires): + self._expire_cookie(expire_time, domain, path, name) + else: + cookie["expires"] = "" - self._cookies[(domain, path)][name] = cookie + key = (domain, path) + if self._cookies[key].get(name) != cookie: + # Don't blow away the cache if the same + # cookie gets 
set again + self._cookies[key][name] = cookie + self._morsel_cache[key].pop(name, None) self._do_expiration() @@ -256,36 +319,52 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": request_origin = request_url.origin() is_not_secure = request_origin not in self._treat_as_secure_origin - # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 - for cookie in sorted(self, key=lambda c: len(c["path"])): - name = cookie.key - domain = cookie["domain"] + # Send shared cookie + for c in self._cookies[("", "")].values(): + filtered[c.key] = c.value - # Send shared cookies - if not domain: - filtered[name] = cookie.value - continue + if is_ip_address(hostname): + if not self._unsafe: + return filtered + domains: Iterable[str] = (hostname,) + else: + # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com") + domains = itertools.accumulate( + reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED + ) - if not self._unsafe and is_ip_address(hostname): - continue + # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar") + paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH) + # Create every combination of (domain, path) pairs. + pairs = itertools.product(domains, paths) - if (domain, name) in self._host_only_cookies: - if domain != hostname: + path_len = len(request_url.path) + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 + for p in pairs: + for name, cookie in self._cookies[p].items(): + domain = cookie["domain"] + + if (domain, name) in self._host_only_cookies and domain != hostname: continue - elif not self._is_domain_match(domain, hostname): - continue - if not self._is_path_match(request_url.path, cookie["path"]): - continue + # Skip edge case when the cookie has a trailing slash but request doesn't. 
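# --- Illustrative aside: how the candidate-key generation above works.  Only
# the (domain, path) pairs that could possibly match the request are produced,
# so cookie lookup no longer scans every stored cookie.  Example host and path
# are placeholders.
import itertools

fmt_path = "{}/{}".format
fmt_domain_reversed = "{1}.{0}".format

hostname = "foo.bar.example.com"
request_path = "/a/b/c"

# 'com', 'example.com', 'bar.example.com', 'foo.bar.example.com'
domains = list(itertools.accumulate(reversed(hostname.split(".")), fmt_domain_reversed))
# '', '/a', '/a/b', '/a/b/c'
paths = list(itertools.accumulate(request_path.split("/"), fmt_path))

print(domains)
print(paths)
print(len(list(itertools.product(domains, paths))))  # 16 candidate keys to check
# --- end aside ---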
+ if len(cookie["path"]) > path_len: + continue - if is_not_secure and cookie["secure"]: - continue + if is_not_secure and cookie["secure"]: + continue + + # We already built the Morsel so reuse it here + if name in self._morsel_cache[p]: + filtered[name] = self._morsel_cache[p][name] + continue - # It's critical we use the Morsel so the coded_value - # (based on cookie version) is preserved - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) - filtered[name] = mrsl_val + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + self._morsel_cache[p][name] = mrsl_val + filtered[name] = mrsl_val return filtered @@ -305,25 +384,6 @@ def _is_domain_match(domain: str, hostname: str) -> bool: return not is_ip_address(hostname) - @staticmethod - def _is_path_match(req_path: str, cookie_path: str) -> bool: - """Implements path matching adhering to RFC 6265.""" - if not req_path.startswith("/"): - req_path = "/" - - if req_path == cookie_path: - return True - - if not req_path.startswith(cookie_path): - return False - - if cookie_path.endswith("/"): - return True - - non_matching = req_path[len(cookie_path) :] - - return non_matching.startswith("/") - @classmethod def _parse_date(cls, date_str: str) -> Optional[int]: """Implements date string parsing adhering to RFC 6265.""" diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 284033b..6ee7078 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -14,7 +14,6 @@ import re import sys import time -import warnings import weakref from collections import namedtuple from contextlib import suppress @@ -35,7 +34,6 @@ List, Mapping, Optional, - Pattern, Protocol, Tuple, Type, @@ -52,7 +50,7 @@ from yarl import URL from . import hdrs -from .log import client_logger, internal_logger +from .log import client_logger if sys.version_info >= (3, 11): import asyncio as async_timeout @@ -165,9 +163,11 @@ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth" """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") - if url.user is None: + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. + if url.raw_user is None and url.raw_password is None: return None - return cls(url.user, url.password or "", encoding=encoding) + return cls(url.user or "", url.password or "", encoding=encoding) def encode(self) -> str: """Encode credentials.""" @@ -176,11 +176,12 @@ def encode(self) -> str: def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - auth = BasicAuth.from_url(url) - if auth is None: + """Remove user and password from URL if present and return BasicAuth object.""" + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. 
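# --- Illustrative aside: the auth-stripping behaviour above, shown with yarl
# directly.  Credentials are read from the URL (raw_user/raw_password avoid
# re-parsing the netloc) and removed from the URL that goes on the wire.  The
# example URL is a placeholder.
from yarl import URL

url = URL("https://user:secret@example.com/path")
auth = None
if url.raw_user is not None or url.raw_password is not None:
    auth = (url.user or "", url.password or "")
    url = url.with_user(None)  # dropping the user also drops the password

print(url)   # https://example.com/path
print(auth)  # ('user', 'secret')
# --- end aside ---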
+ if url.raw_user is None and url.raw_password is None: return url, None - else: - return url.with_user(None), auth + return url.with_user(None), BasicAuth(url.user or "", url.password or "") def netrc_from_env() -> Optional[netrc.netrc]: @@ -287,38 +288,6 @@ def proxies_from_env() -> Dict[str, ProxyInfo]: return ret -def current_task( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> "Optional[asyncio.Task[Any]]": - return asyncio.current_task(loop=loop) - - -def get_running_loop( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> asyncio.AbstractEventLoop: - if loop is None: - loop = asyncio.get_event_loop() - if not loop.is_running(): - warnings.warn( - "The object should be created within an async function", - DeprecationWarning, - stacklevel=3, - ) - if loop.get_debug(): - internal_logger.warning( - "The object should be created within an async function", stack_info=True - ) - return loop - - -def isasyncgenfunction(obj: Any) -> bool: - func = getattr(inspect, "isasyncgenfunction", None) - if func is not None: - return func(obj) # type: ignore[no-any-return] - else: - return False - - def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: """Get a permitted proxy for the given URL from the env.""" if url.host is not None and proxy_bypass(url.host): @@ -504,44 +473,51 @@ def __set__(self, inst: _TSelf[_T], value: _T) -> None: except ImportError: pass -_ipv4_pattern = ( - r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" -) -_ipv6_pattern = ( - r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" - r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" - r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" - r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" - r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" - r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" - r":|:(:[A-F0-9]{1,4}){7})$" -) -_ipv4_regex = re.compile(_ipv4_pattern) -_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) -_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) -_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) +def is_ipv4_address(host: Optional[Union[str, bytes]]) -> bool: + """Check if host looks like an IPv4 address. + + This function does not validate that the format is correct, only that + the host is a str or bytes, and its all numeric. -def _is_ip_address( - regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] -) -> bool: - if host is None: + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ + if not host: return False + # For a host to be an ipv4 address, it must be all numeric. if isinstance(host, str): - return bool(regex.match(host)) - elif isinstance(host, (bytes, bytearray, memoryview)): - return bool(regexb.match(host)) - else: - raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + return host.replace(".", "").isdigit() + if isinstance(host, (bytes, bytearray, memoryview)): + return host.decode("ascii").replace(".", "").isdigit() + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") -is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) -is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) +def is_ipv6_address(host: Optional[Union[str, bytes]]) -> bool: + """Check if host looks like an IPv6 address. 
+ + This function does not validate that the format is correct, only that + the host contains a colon and that it is a str or bytes. + + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ + if not host: + return False + # The host must contain a colon to be an IPv6 address. + if isinstance(host, str): + return ":" in host + if isinstance(host, (bytes, bytearray, memoryview)): + return b":" in host + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: + """Check if host looks like an IP Address. + + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ return is_ipv4_address(host) or is_ipv6_address(host) @@ -619,17 +595,30 @@ def call_later( loop: asyncio.AbstractEventLoop, timeout_ceil_threshold: float = 5, ) -> Optional[asyncio.TimerHandle]: - if timeout is not None and timeout > 0: - when = loop.time() + timeout - if timeout > timeout_ceil_threshold: - when = ceil(when) - return loop.call_at(when, cb) - return None + if timeout is None or timeout <= 0: + return None + now = loop.time() + when = calculate_timeout_when(now, timeout, timeout_ceil_threshold) + return loop.call_at(when, cb) + + +def calculate_timeout_when( + loop_time: float, + timeout: float, + timeout_ceiling_threshold: float, +) -> float: + """Calculate when to execute a timeout.""" + when = loop_time + timeout + if timeout > timeout_ceiling_threshold: + return ceil(when) + return when class TimeoutHandle: """Timeout handle""" + __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks") + def __init__( self, loop: asyncio.AbstractEventLoop, @@ -651,7 +640,7 @@ def register( def close(self) -> None: self._callbacks.clear() - def start(self) -> Optional[asyncio.Handle]: + def start(self) -> Optional[asyncio.TimerHandle]: timeout = self._timeout if timeout is not None and timeout > 0: when = self._loop.time() + timeout @@ -678,11 +667,17 @@ def __call__(self) -> None: class BaseTimerContext(ContextManager["BaseTimerContext"]): + + __slots__ = () + def assert_timeout(self) -> None: """Raise TimeoutError if timeout has been exceeded.""" class TimerNoop(BaseTimerContext): + + __slots__ = () + def __enter__(self) -> BaseTimerContext: return self @@ -698,10 +693,13 @@ def __exit__( class TimerContext(BaseTimerContext): """Low resolution timeout context manager""" + __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling") + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._tasks: List[asyncio.Task[Any]] = [] self._cancelled = False + self._cancelling = 0 def assert_timeout(self) -> None: """Raise TimeoutError if timer has already been cancelled.""" @@ -709,13 +707,18 @@ def assert_timeout(self) -> None: raise asyncio.TimeoutError from None def __enter__(self) -> BaseTimerContext: - task = current_task(loop=self._loop) - + task = asyncio.current_task(loop=self._loop) if task is None: raise RuntimeError( "Timeout context manager should be used " "inside a task" ) + if sys.version_info >= (3, 11): + # Remember if the task was already cancelling + # so when we __exit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = task.cancelling() + if self._cancelled: raise asyncio.TimeoutError from None @@ -728,11 +731,22 @@ def __exit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> Optional[bool]: + enter_task: 
Optional[asyncio.Task[Any]] = None if self._tasks: - self._tasks.pop() + enter_task = self._tasks.pop() if exc_type is asyncio.CancelledError and self._cancelled: - raise asyncio.TimeoutError from None + assert enter_task is not None + # The timeout was hit, and the task was cancelled + # so we need to uncancel the last task that entered the context manager + # since the cancellation should not leak out of the context manager + if sys.version_info >= (3, 11): + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + if enter_task.uncancel() > self._cancelling: + return None + raise asyncio.TimeoutError from exc_val return None def timeout(self) -> None: @@ -749,7 +763,7 @@ def ceil_timeout( if delay is None or delay <= 0: return async_timeout.timeout(None) - loop = get_running_loop() + loop = asyncio.get_running_loop() now = loop.time() when = now + delay if delay > ceil_threshold: @@ -784,7 +798,8 @@ def content_type(self) -> str: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_type # type: ignore[return-value] + assert self._content_type is not None + return self._content_type @property def charset(self) -> Optional[str]: @@ -792,17 +807,14 @@ def charset(self) -> Optional[str]: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore[union-attr] + assert self._content_dict is not None + return self._content_dict.get("charset") @property def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) - - if content_length is not None: - return int(content_length) - else: - return None + return None if content_length is None else int(content_length) def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: @@ -818,8 +830,7 @@ def set_exception( self, exc: BaseException, exc_cause: BaseException = ..., - ) -> None: - ... # pragma: no cover + ) -> None: ... # pragma: no cover def set_exception( @@ -905,12 +916,10 @@ def __init_subclass__(cls) -> None: ) @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: - ... + def __getitem__(self, key: AppKey[_T]) -> _T: ... @overload - def __getitem__(self, key: str) -> Any: - ... + def __getitem__(self, key: str) -> Any: ... def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: for mapping in self._maps: @@ -921,16 +930,13 @@ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: raise KeyError(key) @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: - ... + def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ... @overload - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: - ... + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... @overload - def get(self, key: str, default: Any = ...) -> Any: - ... + def get(self, key: str, default: Any = ...) -> Any: ... 
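# --- Illustrative aside: the Python 3.11 cancellation bookkeeping used by the
# timer context above, reduced to a toy `Deadline` context manager (assumed
# name, not aiohttp's TimerContext).  It records task.cancelling() on entry
# and, on exit, uncancel()s the cancellation it caused itself, converting it
# into TimeoutError only when no external cancellation is still pending.
import asyncio
import sys
from typing import Optional


class Deadline:
    def __init__(self, delay: float) -> None:
        self._delay = delay
        self._cancelling = 0
        self._task: Optional["asyncio.Task[object]"] = None
        self._handle: Optional[asyncio.TimerHandle] = None

    def __enter__(self) -> "Deadline":
        task = asyncio.current_task()
        assert task is not None, "must be used inside a task"
        self._task = task
        if sys.version_info >= (3, 11):
            self._cancelling = task.cancelling()
        self._handle = asyncio.get_running_loop().call_later(self._delay, task.cancel)
        return self

    def __exit__(self, exc_type, exc, tb):
        if self._handle is not None:
            self._handle.cancel()
        if exc_type is asyncio.CancelledError and self._task is not None:
            if sys.version_info >= (3, 11):
                # If external cancel requests remain after undoing our own,
                # let the CancelledError propagate instead of masking it.
                if self._task.uncancel() > self._cancelling:
                    return None
            raise asyncio.TimeoutError from exc
        return None


async def main() -> None:
    try:
        with Deadline(0.05):
            await asyncio.sleep(1)
    except asyncio.TimeoutError:
        print("timed out; cancellation did not leak out of the context manager")


asyncio.run(main())
# --- end aside ---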
def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: try: @@ -993,6 +999,7 @@ def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: return None +@functools.lru_cache def must_be_empty_body(method: str, code: int) -> bool: """Check if a request must return an empty body.""" return ( diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index 72eac3a..c43ee0d 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -1,6 +1,5 @@ """Low-level http related exceptions.""" - from textwrap import indent from typing import Optional, Union diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 0135119..686a2d0 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -47,7 +47,6 @@ TransferEncodingError, ) from .http_writer import HttpVersion, HttpVersion10 -from .log import internal_logger from .streams import EMPTY_PAYLOAD, StreamReader from .typedefs import RawHeaders @@ -249,7 +248,6 @@ def __init__( timer: Optional[BaseTimerContext] = None, code: Optional[int] = None, method: Optional[str] = None, - readall: bool = False, payload_exception: Optional[Type[BaseException]] = None, response_with_body: bool = True, read_until_eof: bool = False, @@ -263,7 +261,6 @@ def __init__( self.timer = timer self.code = code self.method = method - self.readall = readall self.payload_exception = payload_exception self.response_with_body = response_with_body self.read_until_eof = read_until_eof @@ -280,8 +277,10 @@ def __init__( ) @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: - pass + def parse_message(self, lines: List[bytes]) -> _MsgT: ... + + @abc.abstractmethod + def _is_chunked_te(self, te: str) -> bool: ... def feed_eof(self) -> Optional[_MsgT]: if self._payload_parser is not None: @@ -318,6 +317,7 @@ def feed_data( start_pos = 0 loop = self.loop + should_close = False while start_pos < data_len: # read HTTP message (request/response line + headers), \r\n\r\n @@ -330,6 +330,9 @@ def feed_data( continue if pos >= start_pos: + if should_close: + raise BadHttpMessage("Data after `Connection: close`") + # line found line = data[start_pos:pos] if SEP == b"\n": # For lax response parsing @@ -393,7 +396,6 @@ def get_content_length() -> Optional[int]: method=method, compression=msg.compression, code=self.code, - readall=self.readall, response_with_body=self.response_with_body, auto_decompress=self._auto_decompress, lax=self.lax, @@ -413,7 +415,6 @@ def get_content_length() -> Optional[int]: payload, method=msg.method, compression=msg.compression, - readall=True, auto_decompress=self._auto_decompress, lax=self.lax, ) @@ -431,7 +432,6 @@ def get_content_length() -> Optional[int]: method=method, compression=msg.compression, code=self.code, - readall=True, response_with_body=self.response_with_body, auto_decompress=self._auto_decompress, lax=self.lax, @@ -442,6 +442,7 @@ def get_content_length() -> Optional[int]: payload = EMPTY_PAYLOAD messages.append((msg, payload)) + should_close = msg.should_close else: self._tail = data[start_pos:] data = EMPTY @@ -543,10 +544,8 @@ def parse_headers( # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te is not None: - if "chunked" == te.lower(): + if self._is_chunked_te(te): chunked = True - else: - raise BadHttpMessage("Request has invalid `Transfer-Encoding`") if hdrs.CONTENT_LENGTH in headers: raise BadHttpMessage( @@ -656,6 +655,12 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: url, ) + def _is_chunked_te(self, te: str) -> 
bool: + if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + return True + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + class HttpResponseParser(HttpParser[RawResponseMessage]): """Read response status line and headers. @@ -741,6 +746,10 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: chunked, ) + def _is_chunked_te(self, te: str) -> bool: + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked" + class HttpPayloadParser: def __init__( @@ -751,13 +760,12 @@ def __init__( compression: Optional[str] = None, code: Optional[int] = None, method: Optional[str] = None, - readall: bool = False, response_with_body: bool = True, auto_decompress: bool = True, lax: bool = False, ) -> None: self._length = 0 - self._type = ParseState.PARSE_NONE + self._type = ParseState.PARSE_UNTIL_EOF self._chunk = ChunkState.PARSE_CHUNKED_SIZE self._chunk_size = 0 self._chunk_tail = b"" @@ -779,7 +787,6 @@ def __init__( self._type = ParseState.PARSE_NONE real_payload.feed_eof() self.done = True - elif chunked: self._type = ParseState.PARSE_CHUNKED elif length is not None: @@ -788,16 +795,6 @@ def __init__( if self._length == 0: real_payload.feed_eof() self.done = True - else: - if readall and code != 204: - self._type = ParseState.PARSE_UNTIL_EOF - elif method in ("PUT", "POST"): - internal_logger.warning( # pragma: no cover - "Content-Length or Transfer-Encoding header is required" - ) - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True self.payload = real_payload @@ -888,13 +885,13 @@ def feed_data( self._chunk_size = 0 self.payload.feed_data(chunk[:required], required) chunk = chunk[required:] - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF self.payload.end_http_chunk_receiving() # toss the CRLF at the end of the chunk if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] if chunk[: len(SEP)] == SEP: chunk = chunk[len(SEP) :] self._chunk = ChunkState.PARSE_CHUNKED_SIZE diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 39f2e4a..fb00ebc 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -8,6 +8,7 @@ import sys import zlib from enum import IntEnum +from functools import partial from struct import Struct from typing import ( Any, @@ -24,6 +25,7 @@ ) from .base_protocol import BaseProtocol +from .client_exceptions import ClientConnectionResetError from .compression_utils import ZLibCompressor, ZLibDecompressor from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue @@ -93,6 +95,14 @@ class WSMsgType(IntEnum): error = ERROR +MESSAGE_TYPES_WITH_CONTENT: Final = frozenset( + { + WSMsgType.BINARY, + WSMsgType.TEXT, + WSMsgType.CONTINUATION, + } +) + WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" @@ -103,8 +113,10 @@ class WSMsgType(IntEnum): PACK_LEN2 = Struct("!BBH").pack PACK_LEN3 = Struct("!BBQ").pack PACK_CLOSE_CODE = Struct("!H").pack +PACK_RANDBITS = Struct("!L").pack MSG_SIZE: Final[int] = 2**14 DEFAULT_LIMIT: Final[int] = 2**16 +MASK_LEN: Final[int] = 4 class WSMessage(NamedTuple): @@ -294,7 +306,7 @@ def __init__( self._frame_opcode: Optional[int] = None self._frame_payload = bytearray() - self._tail = b"" + self._tail: bytes = b"" self._has_mask = False self._frame_mask: 
Optional[bytes] = None self._payload_length = 0 @@ -311,17 +323,101 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return True, data try: - return self._feed_data(data) + self._feed_data(data) except Exception as exc: self._exc = exc set_exception(self.queue, exc) return True, b"" - def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: + return False, b"" + + def _feed_data(self, data: bytes) -> None: for fin, opcode, payload, compressed in self.parse_frame(data): - if compressed and not self._decompressobj: - self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) - if opcode == WSMsgType.CLOSE: + if opcode in MESSAGE_TYPES_WITH_CONTENT: + # load text/binary + is_continuation = opcode == WSMsgType.CONTINUATION + if not fin: + # got partial frame payload + if not is_continuation: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + continue + + has_partial = bool(self._partial) + if is_continuation: + if self._opcode is None: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + opcode = self._opcode + self._opcode = None + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if has_partial: + assembled_payload = self._partial + payload + self._partial.clear() + else: + assembled_payload = payload + + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(assembled_payload), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. 
+ if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor( + suppress_deflate_header=True + ) + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + _WS_DEFLATE_TRAILING, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(assembled_payload) + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text)) + continue + + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged) + ) + elif opcode == WSMsgType.CLOSE: if len(payload) >= 2: close_code = UNPACK_CLOSE_CODE(payload[:2])[0] if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: @@ -356,241 +452,145 @@ def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: WSMessage(WSMsgType.PONG, payload, ""), len(payload) ) - elif ( - opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) - and self._opcode is None - ): + else: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" ) - else: - # load text/binary - if not fin: - # got partial frame payload - if opcode != WSMsgType.CONTINUATION: - self._opcode = opcode - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - else: - # previous frame was non finished - # we should get continuation opcode - if self._partial: - if opcode != WSMsgType.CONTINUATION: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if opcode == WSMsgType.CONTINUATION: - assert self._opcode is not None - opcode = self._opcode - self._opcode = None - - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. 
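# --- Illustrative aside: the permessage-deflate framing the reader above
# undoes.  Compressed WebSocket messages are raw deflate output with the
# final 0x00 0x00 0xff 0xff sync-flush trailer stripped; the receiver appends
# it back before inflating (RFC 7692).  Toy code, not the parser's API.
import zlib

_WS_DEFLATE_TRAILING = b"\x00\x00\xff\xff"

compressor = zlib.compressobj(wbits=-zlib.MAX_WBITS)
payload = compressor.compress(b"hello websocket") + compressor.flush(zlib.Z_SYNC_FLUSH)
assert payload.endswith(_WS_DEFLATE_TRAILING)
on_the_wire = payload[:-4]  # trailer removed before the frame is sent

decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
print(decompressor.decompress(on_the_wire + _WS_DEFLATE_TRAILING))  # b'hello websocket'
# --- end aside ---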
- if compressed: - assert self._decompressobj is not None - self._partial.extend(_WS_DEFLATE_TRAILING) - payload_merged = self._decompressobj.decompress_sync( - self._partial, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(self._partial) - - self._partial.clear() - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - self.queue.feed_data( - WSMessage(WSMsgType.TEXT, text, ""), len(text) - ) - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - else: - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), - len(payload_merged), - ) - - return False, b"" def parse_frame( self, buf: bytes ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: """Return the next frame from the socket.""" - frames = [] + frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] if self._tail: buf, self._tail = self._tail + buf, b"" - start_pos = 0 + start_pos: int = 0 buf_length = len(buf) while True: # read header - if self._state == WSParserState.READ_HEADER: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) + if self._state is WSParserState.READ_HEADER: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise 
WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", - ) + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - else: - break + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH # read payload length - if self._state == WSParserState.READ_PAYLOAD_LENGTH: - length = self._payload_length_flag - if length == 126: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: + if self._state is WSParserState.READ_PAYLOAD_LENGTH: + length_flag = self._payload_length_flag + if length_flag == 126: + if buf_length - start_pos < 2: break - elif length > 126: - if buf_length - start_pos >= 8: - data = buf[start_pos : start_pos + 8] - start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + self._payload_length = UNPACK_LEN2(data)[0] + elif length_flag > 126: + if buf_length - start_pos < 8: break + data = buf[start_pos : start_pos + 8] + start_pos += 8 + self._payload_length = UNPACK_LEN3(data)[0] else: - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) + self._payload_length = length_flag + + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) # read payload mask - if self._state == WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos >= 4: - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - else: + if self._state is WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos < 4: break + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD - if self._state == WSParserState.READ_PAYLOAD: + if self._state is WSParserState.READ_PAYLOAD: length = self._payload_length payload = self._frame_payload chunk_len = buf_length - start_pos if length >= chunk_len: self._payload_length = 
length - chunk_len - payload.extend(buf[start_pos:]) + payload += buf[start_pos:] start_pos = buf_length else: self._payload_length = 0 - payload.extend(buf[start_pos : start_pos + length]) + payload += buf[start_pos : start_pos + length] start_pos = start_pos + length - if self._payload_length == 0: - if self._has_mask: - assert self._frame_mask is not None - _websocket_mask(self._frame_mask, payload) + if self._payload_length != 0: + break - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - else: - break + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER self._tail = buf[start_pos:] @@ -612,7 +612,7 @@ def __init__( self.protocol = protocol self.transport = transport self.use_mask = use_mask - self.randrange = random.randrange + self.get_random_bits = partial(random.getrandbits, 32) self.compress = compress self.notakeover = notakeover self._closing = False @@ -625,14 +625,20 @@ async def _send_frame( ) -> None: """Send a frame over the websocket with message as its payload.""" if self._closing and not (opcode & WSMsgType.CLOSE): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 rsv = 0 - # Only compress larger packets (disabled) # Does small packet needs to be compressed? # if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: + # RSV1 (rsv = 0x40) is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + rsv = 0x40 + if compress: # Do not set self._compress if compressing is for this frame compressobj = self._make_compress_obj(compress) @@ -651,29 +657,39 @@ async def _send_frame( ) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] - rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask - if use_mask: - mask_bit = 0x80 - else: - mask_bit = 0 + mask_bit = 0x80 if use_mask else 0 + # Depending on the message length, the header is assembled differently. + # The first byte is reserved for the opcode and the RSV bits. + first_byte = 0x80 | rsv | opcode if msg_length < 126: - header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + header = PACK_LEN1(first_byte, msg_length | mask_bit) + header_len = 2 elif msg_length < (1 << 16): - header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) + header_len = 4 else: - header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) + header_len = 10 + + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 + # If we are using a mask, we need to generate it randomly + # and apply it to the message before sending it. A mask is + # a 32-bit value that is applied to the message using a + # bitwise XOR operation. It is used to prevent certain types + # of attacks on the websocket protocol. The mask is only used + # when aiohttp is acting as a client. Servers do not use a mask. 
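# --- Illustrative aside: the client-side masking described in the comment
# above.  A random 32-bit mask is packed with struct and XOR-ed over the
# payload; XOR-ing again with the same mask restores the original.  Toy code
# only, not the writer's internal API.
import random
from struct import Struct
from typing import Tuple

PACK_RANDBITS = Struct("!L").pack


def mask_payload(payload: bytes) -> Tuple[bytes, bytes]:
    mask = PACK_RANDBITS(random.getrandbits(32))
    masked = bytes(b ^ mask[i % 4] for i, b in enumerate(payload))
    return mask, masked


mask, masked = mask_payload(b"hello")
unmasked = bytes(b ^ mask[i % 4] for i, b in enumerate(masked))
assert unmasked == b"hello"
# --- end aside ---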
if use_mask: - mask_int = self.randrange(0, 0xFFFFFFFF) - mask = mask_int.to_bytes(4, "big") + mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) - self._output_size += len(header) + len(mask) + msg_length + self._output_size += header_len + MASK_LEN + msg_length + else: if msg_length > MSG_SIZE: self._write(header) @@ -681,11 +697,16 @@ async def _send_frame( else: self._write(header + message) - self._output_size += len(header) + msg_length + self._output_size += header_len + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper() @@ -699,7 +720,7 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: def _write(self, data: bytes) -> None: if self.transport is None or self.transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") self.transport.write(data) async def pong(self, message: Union[bytes, str] = b"") -> None: diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index d6b02e6..dc07a35 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -8,6 +8,7 @@ from .abc import AbstractStreamWriter from .base_protocol import BaseProtocol +from .client_exceptions import ClientConnectionResetError from .compression_utils import ZLibCompressor from .helpers import NO_EXTENSIONS @@ -70,9 +71,9 @@ def _write(self, chunk: bytes) -> None: size = len(chunk) self.buffer_size += size self.output_size += size - transport = self.transport - if not self._protocol.connected or transport is None or transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") + transport = self._protocol.transport + if transport is None or transport.is_closing(): + raise ClientConnectionResetError("Cannot write to closing transport") transport.write(chunk) async def write( diff --git a/aiohttp/locks.py b/aiohttp/locks.py deleted file mode 100644 index de2dc83..0000000 --- a/aiohttp/locks.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import collections -from typing import Any, Deque, Optional - - -class EventResultOrError: - """Event asyncio lock helper class. - - Wraps the Event asyncio lock allowing either to awake the - locked Tasks without any error or raising an exception. - - thanks to @vorpalsmith for the simple design. 
- """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._exc: Optional[BaseException] = None - self._event = asyncio.Event() - self._waiters: Deque[asyncio.Future[Any]] = collections.deque() - - def set(self, exc: Optional[BaseException] = None) -> None: - self._exc = exc - self._event.set() - - async def wait(self) -> Any: - waiter = self._loop.create_task(self._event.wait()) - self._waiters.append(waiter) - try: - val = await waiter - finally: - self._waiters.remove(waiter) - - if self._exc is not None: - raise self._exc - - return val - - def cancel(self) -> None: - """Cancel all waiters""" - for waiter in self._waiters: - waiter.cancel() diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 71fc265..965e4f2 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -2,6 +2,7 @@ import binascii import json import re +import sys import uuid import warnings import zlib @@ -10,7 +11,6 @@ from typing import ( TYPE_CHECKING, Any, - AsyncIterator, Deque, Dict, Iterator, @@ -48,6 +48,13 @@ ) from .streams import StreamReader +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing import TypeVar + + Self = TypeVar("Self", bound="BodyPartReader") + __all__ = ( "MultipartReader", "MultipartWriter", @@ -266,6 +273,7 @@ def __init__( ) -> None: self.headers = headers self._boundary = boundary + self._boundary_len = len(boundary) + 2 # Boundary + \r\n self._content = content self._default_charset = default_charset self._at_eof = False @@ -279,8 +287,8 @@ def __init__( self._content_eof = 0 self._cache: Dict[str, Any] = {} - def __aiter__(self) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] + def __aiter__(self: Self) -> Self: + return self async def __anext__(self) -> bytes: part = await self.next() @@ -322,6 +330,31 @@ async def read_chunk(self, size: int = chunk_size) -> bytes: else: chunk = await self._read_chunk_from_stream(size) + # For the case of base64 data, we must read a fragment of size with a + # remainder of 0 by dividing by 4 for string without symbols \n or \r + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING) + if encoding and encoding.lower() == "base64": + stripped_chunk = b"".join(chunk.split()) + remainder = len(stripped_chunk) % 4 + + while remainder != 0 and not self.at_eof(): + over_chunk_size = 4 - remainder + over_chunk = b"" + + if self._prev_chunk: + over_chunk = self._prev_chunk[:over_chunk_size] + self._prev_chunk = self._prev_chunk[len(over_chunk) :] + + if len(over_chunk) != over_chunk_size: + over_chunk += await self._content.read(4 - len(over_chunk)) + + if not over_chunk: + self._at_eof = True + + stripped_chunk += b"".join(over_chunk.split()) + chunk += over_chunk + remainder = len(stripped_chunk) % 4 + self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True @@ -346,15 +379,25 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: # Reads content chunk of body part with unknown length. # The Content-Length header for body part is not necessary. 
assert ( - size >= len(self._boundary) + 2 + size >= self._boundary_len ), "Chunk size must be greater or equal than boundary length + 2" first_chunk = self._prev_chunk is None if first_chunk: self._prev_chunk = await self._content.read(size) - chunk = await self._content.read(size) - self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" + chunk = b"" + # content.read() may return less than size, so we need to loop to ensure + # we have enough data to detect the boundary. + while len(chunk) < self._boundary_len: + chunk += await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + if self._content_eof: + break + if len(chunk) > size: + self._content.unread_data(chunk[size:]) + chunk = chunk[:size] + assert self._prev_chunk is not None window = self._prev_chunk + chunk sub = b"\r\n" + self._boundary @@ -518,6 +561,8 @@ def filename(self) -> Optional[str]: @payload_type(BodyPartReader, order=Order.try_first) class BodyPartReaderPayload(Payload): + _value: BodyPartReader + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: super().__init__(value, *args, **kwargs) @@ -530,6 +575,9 @@ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: if params: self.set_content_disposition("attachment", True, **params) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + async def write(self, writer: Any) -> None: field = self._value chunk = await field.read_chunk(size=2**16) @@ -545,7 +593,7 @@ class MultipartReader: response_wrapper_cls = MultipartResponseWrapper #: Multipart reader class, used to handle multipart/* body parts. #: None points to type(self) - multipart_reader_cls = None + multipart_reader_cls: Optional[Type["MultipartReader"]] = None #: Body part reader class for non multipart/* content types. 
part_reader_cls = BodyPartReader @@ -566,10 +614,8 @@ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: self._at_bof = True self._unread: List[bytes] = [] - def __aiter__( - self, - ) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] + def __aiter__(self: Self) -> Self: + return self async def __anext__( self, @@ -749,6 +795,8 @@ async def _maybe_release_last_part(self) -> None: class MultipartWriter(Payload): """Multipart body writer.""" + _value: None + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: boundary = boundary if boundary is not None else uuid.uuid4().hex # The underlying Payload API demands a str (utf-8), not bytes, @@ -929,6 +977,16 @@ def size(self) -> Optional[int]: total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join( + "--" + + self.boundary + + "\n" + + part._binary_headers.decode(encoding, errors) + + part.decode() + for part, _e, _te in self._parts + ) + async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 6593b05..2763697 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -11,7 +11,6 @@ IO, TYPE_CHECKING, Any, - ByteString, Dict, Final, Iterable, @@ -208,6 +207,13 @@ def set_content_disposition( disptype, quote_fields=quote_fields, _charset=_charset, **params ) + @abstractmethod + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return string representation of the value. + + This is named decode() to allow compatibility with bytes objects. + """ + @abstractmethod async def write(self, writer: AbstractStreamWriter) -> None: """Write payload. 
@@ -217,7 +223,11 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesPayload(Payload): - def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: + _value: bytes + + def __init__( + self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any + ) -> None: if not isinstance(value, (bytes, bytearray, memoryview)): raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") @@ -241,6 +251,9 @@ def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: **kwargs, ) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.decode(encoding, errors) + async def write(self, writer: AbstractStreamWriter) -> None: await writer.write(self._value) @@ -282,7 +295,7 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): - _value: IO[Any] + _value: io.IOBase def __init__( self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any @@ -306,9 +319,12 @@ async def write(self, writer: AbstractStreamWriter) -> None: finally: await loop.run_in_executor(None, self._value.close) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join(r.decode(encoding, errors) for r in self._value.readlines()) + class TextIOPayload(IOBasePayload): - _value: TextIO + _value: io.TextIOBase def __init__( self, @@ -345,6 +361,9 @@ def size(self) -> Optional[int]: except OSError: return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read() + async def write(self, writer: AbstractStreamWriter) -> None: loop = asyncio.get_event_loop() try: @@ -362,6 +381,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesIOPayload(IOBasePayload): + _value: io.BytesIO + @property def size(self) -> int: position = self._value.tell() @@ -369,17 +390,27 @@ def size(self) -> int: self._value.seek(position) return end - position + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class BufferedReaderPayload(IOBasePayload): + _value: io.BufferedIOBase + @property def size(self) -> Optional[int]: try: return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: + except (OSError, AttributeError): # data.fileno() is not supported, e.g. # io.BufferedReader(io.BytesIO(b'data')) + # For some file-like objects (e.g. tarfile), the fileno() attribute may + # not exist at all, and will instead raise an AttributeError. 
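# Illustrative sketch of the (OSError, AttributeError) fallback above, using
# hypothetical names and only the standard library:
import io
import os
from typing import Optional

class NoFilenoReader:
    """File-like object that does not provide fileno() at all."""
    def read(self, n: int = -1) -> bytes:
        return b""
    def tell(self) -> int:
        return 0

def probe_size(fobj) -> Optional[int]:
    try:
        return os.fstat(fobj.fileno()).st_size - fobj.tell()
    except (OSError, AttributeError):
        return None  # size unknown: callers fall back to chunked streaming

assert probe_size(NoFilenoReader()) is None                        # AttributeError
assert probe_size(io.BufferedReader(io.BytesIO(b"data"))) is None  # OSError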
return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class JsonPayload(BytesPayload): def __init__( @@ -416,6 +447,7 @@ def __init__( class AsyncIterablePayload(Payload): _iter: Optional[_AsyncIterator] = None + _value: _AsyncIterable def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: if not isinstance(value, AsyncIterable): @@ -443,6 +475,9 @@ async def write(self, writer: AbstractStreamWriter) -> None: except StopAsyncIteration: self._iter = None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + class StreamReaderPayload(AsyncIterablePayload): def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: diff --git a/aiohttp/payload_streamer.py b/aiohttp/payload_streamer.py index 364f763..831fdc0 100644 --- a/aiohttp/payload_streamer.py +++ b/aiohttp/payload_streamer.py @@ -65,6 +65,9 @@ class StreamWrapperPayload(Payload): async def write(self, writer: AbstractStreamWriter) -> None: await self._value(writer) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + @payload_type(streamer) class StreamPayload(StreamWrapperPayload): diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 5754747..55964ea 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -1,13 +1,21 @@ import asyncio import contextlib +import inspect import warnings -from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Iterator, + Optional, + Protocol, + Type, + Union, +) import pytest -from aiohttp.helpers import isasyncgenfunction -from aiohttp.web import Application - from .test_utils import ( BaseTestServer, RawTestServer, @@ -18,15 +26,35 @@ teardown_test_loop, unused_port as _unused_port, ) +from .web import Application +from .web_protocol import _RequestHandler try: import uvloop except ImportError: # pragma: no cover uvloop = None # type: ignore[assignment] -AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] -AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] -AiohttpServer = Callable[[Application], Awaitable[TestServer]] + +class AiohttpClient(Protocol): + def __call__( + self, + __param: Union[Application, BaseTestServer], + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> Awaitable[TestClient]: ... + + +class AiohttpServer(Protocol): + def __call__( + self, app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[TestServer]: ... + + +class AiohttpRawServer(Protocol): + def __call__( + self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[RawTestServer]: ... 
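# Illustrative sketch (hypothetical names) of why the fixture types above moved
# from plain Callable aliases to typing.Protocol: a callback protocol can
# describe keyword-only parameters such as server_kwargs or port, which
# Callable[...] cannot express.
from typing import Any, Optional, Protocol

class ServerFactory(Protocol):
    def __call__(self, app: str, *, port: Optional[int] = None, **kwargs: Any) -> str: ...

def make_server(app: str, *, port: Optional[int] = None, **kwargs: Any) -> str:
    return f"{app}:{port or 0}"

factory: ServerFactory = make_server   # matches structurally under a type checker
assert factory("demo", port=8080) == "demo:8080"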
def pytest_addoption(parser): # type: ignore[no-untyped-def] @@ -57,7 +85,7 @@ def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] """ func = fixturedef.func - if isasyncgenfunction(func): + if inspect.isasyncgenfunction(func): # async generator fixture is_async_gen = True elif asyncio.iscoroutinefunction(func): @@ -262,7 +290,9 @@ def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: """ servers = [] - async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] + async def go( + app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> TestServer: server = TestServer(app, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) @@ -295,7 +325,9 @@ def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawSe """ servers = [] - async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] + async def go( + handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> RawTestServer: server = RawTestServer(handler, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 6c17b1e..c01a46f 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,20 +1,25 @@ import asyncio import socket -from typing import Any, Dict, List, Optional, Type, Union +import sys +from typing import Any, Dict, List, Optional, Tuple, Type, Union -from .abc import AbstractResolver -from .helpers import get_running_loop +from .abc import AbstractResolver, ResolveResult __all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") + try: import aiodns - # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') + aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo") except ImportError: # pragma: no cover - aiodns = None + aiodns = None # type: ignore[assignment] + aiodns_default = False + -aiodns_default = False +_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV +_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) class ThreadedResolver(AbstractResolver): @@ -25,48 +30,48 @@ class ThreadedResolver(AbstractResolver): """ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) + self._loop = loop or asyncio.get_running_loop() async def resolve( - self, hostname: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: infos = await self._loop.getaddrinfo( - hostname, + host, port, type=socket.SOCK_STREAM, family=family, flags=socket.AI_ADDRCONFIG, ) - hosts = [] + hosts: List[ResolveResult] = [] for family, _, proto, _, address in infos: if family == socket.AF_INET6: if len(address) < 3: # IPv6 is not supported by Python build, # or IPv6 is not enabled in the host continue - if address[3]: + if address[3] and _SUPPORTS_SCOPE_ID: # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. 
- host, _port = socket.getnameinfo( - address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV + resolved_host, _port = await self._loop.getnameinfo( + address, _NAME_SOCKET_FLAGS ) port = int(_port) else: - host, port = address[:2] + resolved_host, port = address[:2] else: # IPv4 assert family == socket.AF_INET - host, port = address # type: ignore[misc] + resolved_host, port = address # type: ignore[misc] hosts.append( - { - "hostname": hostname, - "host": host, - "port": port, - "family": family, - "proto": proto, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=proto, + flags=_NUMERIC_SOCKET_FLAGS, + ) ) return hosts @@ -87,36 +92,60 @@ def __init__( if aiodns is None: raise RuntimeError("Resolver requires aiodns library") - self._loop = get_running_loop(loop) - self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) + self._resolver = aiodns.DNSResolver(*args, **kwargs) if not hasattr(self._resolver, "gethostbyname"): # aiodns 1.1 is not available, fallback to DNSResolver.query self.resolve = self._resolve_with_query # type: ignore async def resolve( - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: try: - resp = await self._resolver.gethostbyname(host, family) + resp = await self._resolver.getaddrinfo( + host, + port=port, + type=socket.SOCK_STREAM, + family=family, + flags=socket.AI_ADDRCONFIG, + ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - hosts = [] - for address in resp.addresses: + raise OSError(None, msg) from exc + hosts: List[ResolveResult] = [] + for node in resp.nodes: + address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr + family = node.family + if family == socket.AF_INET6: + if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. 
+ result = await self._resolver.getnameinfo( + (address[0].decode("ascii"), *address[1:]), + _NAME_SOCKET_FLAGS, + ) + resolved_host = result.node + else: + resolved_host = address[0].decode("ascii") + port = address[1] + else: # IPv4 + assert family == socket.AF_INET + resolved_host = address[0].decode("ascii") + port = address[1] hosts.append( - { - "hostname": host, - "host": address, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=0, + flags=_NUMERIC_SOCKET_FLAGS, + ) ) if not hosts: - raise OSError("DNS lookup failed") + raise OSError(None, "DNS lookup failed") return hosts @@ -132,7 +161,7 @@ async def _resolve_with_query( resp = await self._resolver.query(host, qtype) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc + raise OSError(None, msg) from exc hosts = [] for rr in resp: @@ -148,7 +177,7 @@ async def _resolve_with_query( ) if not hosts: - raise OSError("DNS lookup failed") + raise OSError(None, "DNS lookup failed") return hosts diff --git a/aiohttp/streams.py b/aiohttp/streams.py index b9b9c3f..c927cfb 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -296,6 +296,9 @@ def end_http_chunk_receiving(self) -> None: set_result(waiter, None) async def _wait(self, func_name: str) -> None: + if not self._protocol.connected: + raise RuntimeError("Connection closed.") + # StreamReader uses a future to link the protocol feed_data() method # to a read coroutine. Running two read coroutines at the same time # would have an unexpected behaviour. It would not possible to know diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index a36e859..850efcb 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -11,17 +11,7 @@ import warnings from abc import ABC, abstractmethod from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Iterator, - List, - Optional, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, cast from unittest import IsolatedAsyncioTestCase, mock from aiosignal import Signal @@ -29,7 +19,11 @@ from yarl import URL import aiohttp -from aiohttp.client import _RequestContextManager, _WSRequestContextManager +from aiohttp.client import ( + _RequestContextManager, + _RequestOptions, + _WSRequestContextManager, +) from . 
import ClientSession, hdrs from .abc import AbstractCookieJar @@ -37,6 +31,7 @@ from .client_ws import ClientWebSocketResponse from .helpers import sentinel from .http import HttpVersion, RawRequestMessage +from .streams import EMPTY_PAYLOAD, StreamReader from .typedefs import StrOrURL from .web import ( Application, @@ -55,6 +50,9 @@ else: SSLContext = None +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" @@ -90,7 +88,7 @@ class BaseTestServer(ABC): def __init__( self, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", loop: Optional[asyncio.AbstractEventLoop] = None, host: str = "127.0.0.1", port: Optional[int] = None, @@ -121,10 +119,13 @@ async def start_server( await self.runner.setup() if not self.port: self.port = 0 + absolute_host = self.host try: version = ipaddress.ip_address(self.host).version except ValueError: version = 4 + if version == 6: + absolute_host = f"[{self.host}]" family = socket.AF_INET6 if version == 6 else socket.AF_INET _sock = self.socket_factory(self.host, self.port, family) self.host, self.port = _sock.getsockname()[:2] @@ -135,13 +136,9 @@ async def start_server( sockets = server.sockets # type: ignore[attr-defined] assert sockets is not None self.port = sockets[0].getsockname()[1] - if self.scheme is sentinel: - if self._ssl: - scheme = "https" - else: - scheme = "http" - self.scheme = scheme - self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + if not self.scheme: + self.scheme = "https" if self._ssl else "http" + self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}") @abstractmethod # pragma: no cover async def _make_runner(self, **kwargs: Any) -> BaseRunner: @@ -151,7 +148,7 @@ def make_url(self, path: StrOrURL) -> URL: assert self._root is not None url = URL(path) if not self.skip_url_asserts: - assert not url.is_absolute() + assert not url.absolute return self._root.join(url) else: return URL(str(self._root) + str(path)) @@ -222,7 +219,7 @@ def __init__( self, app: Application, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", host: str = "127.0.0.1", port: Optional[int] = None, **kwargs: Any, @@ -239,7 +236,7 @@ def __init__( self, handler: _RequestHandler, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", host: str = "127.0.0.1", port: Optional[int] = None, **kwargs: Any, @@ -324,45 +321,101 @@ async def _request( self._responses.append(resp) return resp - def request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> _RequestContextManager: - """Routes a request to tested http server. + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions] + ) -> _RequestContextManager: ... + + def get( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def options( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def head( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def post( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def put( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def patch( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... 
+ + def delete( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... - The interface is identical to aiohttp.ClientSession.request, - except the loop kwarg is overridden by the instance used by the - test server. + else: - """ - return _RequestContextManager(self._request(method, path, **kwargs)) + def request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> _RequestContextManager: + """Routes a request to tested http server. - def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP GET request.""" - return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. - def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP POST request.""" - return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + """ + return _RequestContextManager(self._request(method, path, **kwargs)) - def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP OPTIONS request.""" - return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) + def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) - def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP HEAD request.""" - return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) - def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PUT request.""" - return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager( + self._request(hdrs.METH_OPTIONS, path, **kwargs) + ) + + def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + + def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) - def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) + def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, path, **kwargs) + ) - def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) + def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, path, **kwargs) + ) def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: """Initiate websocket connection. 
@@ -582,7 +635,7 @@ def make_mocked_request( writer: Any = sentinel, protocol: Any = sentinel, transport: Any = sentinel, - payload: Any = sentinel, + payload: StreamReader = EMPTY_PAYLOAD, sslcontext: Optional[SSLContext] = None, client_max_size: int = 1024**2, loop: Any = ..., @@ -651,9 +704,6 @@ def make_mocked_request( protocol.transport = transport protocol.writer = writer - if payload is sentinel: - payload = mock.Mock() - req = Request( message, payload, protocol, writer, task, loop, client_max_size=client_max_size ) diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 62847a0..012ed7b 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -1,5 +1,5 @@ from types import SimpleNamespace -from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar +from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar import attr from aiosignal import Signal @@ -19,8 +19,7 @@ def __call__( __client_session: ClientSession, __trace_config_ctx: SimpleNamespace, __params: _ParamT_contra, - ) -> Awaitable[None]: - ... + ) -> Awaitable[None]: ... __all__ = ( @@ -50,9 +49,9 @@ class TraceConfig: def __init__( self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace ) -> None: - self._on_request_start: Signal[ - _SignalCallback[TraceRequestStartParams] - ] = Signal(self) + self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = ( + Signal(self) + ) self._on_request_chunk_sent: Signal[ _SignalCallback[TraceRequestChunkSentParams] ] = Signal(self) @@ -89,12 +88,12 @@ def __init__( self._on_dns_resolvehost_end: Signal[ _SignalCallback[TraceDnsResolveHostEndParams] ] = Signal(self) - self._on_dns_cache_hit: Signal[ - _SignalCallback[TraceDnsCacheHitParams] - ] = Signal(self) - self._on_dns_cache_miss: Signal[ - _SignalCallback[TraceDnsCacheMissParams] - ] = Signal(self) + self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = ( + Signal(self) + ) + self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = ( + Signal(self) + ) self._on_request_headers_sent: Signal[ _SignalCallback[TraceRequestHeadersSentParams] ] = Signal(self) @@ -102,7 +101,7 @@ def __init__( self._trace_config_ctx_factory = trace_config_ctx_factory def trace_config_ctx( - self, trace_request_ctx: Optional[SimpleNamespace] = None + self, trace_request_ctx: Optional[Mapping[str, str]] = None ) -> SimpleNamespace: """Return a new trace_config_ctx instance""" return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 5e963e1..cc8c082 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -7,12 +7,15 @@ Callable, Iterable, Mapping, + Protocol, Tuple, Union, ) from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr -from yarl import URL +from yarl import URL, Query as _Query + +Query = _Query DEFAULT_JSON_ENCODER = json.dumps DEFAULT_JSON_DECODER = json.loads @@ -34,7 +37,13 @@ Byteish = Union[bytes, bytearray, memoryview] JSONEncoder = Callable[[Any], str] JSONDecoder = Callable[[str], Any] -LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] +LooseHeaders = Union[ + Mapping[str, str], + Mapping[istr, str], + _CIMultiDict, + _CIMultiDictProxy, + Iterable[Tuple[Union[str, istr], str]], +] RawHeaders = Tuple[Tuple[bytes, bytes], ...] 
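# Illustrative sketch of the widened LooseHeaders union above: plain mappings,
# CIMultiDict instances and iterables of pairs are all accepted header inputs.
# The helper name is hypothetical; multidict is already an aiohttp dependency.
from multidict import CIMultiDict

def as_headers(headers) -> CIMultiDict:
    # CIMultiDict's constructor handles every shape in the union.
    return CIMultiDict(headers)

assert as_headers({"X-Token": "1"})["x-token"] == "1"
assert as_headers([("Accept", "text/html"), ("Accept", "application/json")]).getall(
    "Accept"
) == ["text/html", "application/json"]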
StrOrURL = Union[str, URL] @@ -49,6 +58,12 @@ ] Handler = Callable[["Request"], Awaitable["StreamResponse"]] -Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]] + + +class Middleware(Protocol): + def __call__( + self, request: "Request", handler: Handler + ) -> Awaitable["StreamResponse"]: ... + PathLike = Union[str, "os.PathLike[str]"] diff --git a/aiohttp/web.py b/aiohttp/web.py index e911650..88bf14b 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -7,7 +7,6 @@ from argparse import ArgumentParser from collections.abc import Iterable from contextlib import suppress -from functools import partial from importlib import import_module from typing import ( Any, @@ -21,7 +20,6 @@ Union, cast, ) -from weakref import WeakSet from .abc import AbstractAccessLogger from .helpers import AppKey as AppKey @@ -320,23 +318,6 @@ async def _run_app( reuse_port: Optional[bool] = None, handler_cancellation: bool = False, ) -> None: - async def wait( - starting_tasks: "WeakSet[asyncio.Task[object]]", shutdown_timeout: float - ) -> None: - # Wait for pending tasks for a given time limit. - t = asyncio.current_task() - assert t is not None - starting_tasks.add(t) - with suppress(asyncio.TimeoutError): - await asyncio.wait_for(_wait(starting_tasks), timeout=shutdown_timeout) - - async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None: - t = asyncio.current_task() - assert t is not None - exclude.add(t) - while tasks := asyncio.all_tasks().difference(exclude): - await asyncio.wait(tasks) - # An internal function to actually do all dirty job for application running if asyncio.iscoroutine(app): app = await app @@ -355,12 +336,6 @@ async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None: ) await runner.setup() - # On shutdown we want to avoid waiting on tasks which run forever. - # It's very likely that all tasks which run forever will have been created by - # the time we have completed the application startup (in runner.setup()), - # so we just record all running tasks here and exclude them later. 
- starting_tasks: "WeakSet[asyncio.Task[object]]" = WeakSet(asyncio.all_tasks()) - runner.shutdown_callback = partial(wait, starting_tasks, shutdown_timeout) sites: List[BaseSite] = [] @@ -545,10 +520,14 @@ def run_app( except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: - _cancel_tasks({main_task}, loop) - _cancel_tasks(asyncio.all_tasks(loop), loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() + try: + main_task.cancel() + with suppress(asyncio.CancelledError): + loop.run_until_complete(main_task) + finally: + _cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() def main(argv: List[str]) -> None: diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 91bf5fd..78b1a67 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -1,7 +1,7 @@ import asyncio import logging import warnings -from functools import partial, update_wrapper +from functools import lru_cache, partial, update_wrapper from typing import ( TYPE_CHECKING, Any, @@ -38,7 +38,7 @@ from .http_parser import RawRequestMessage from .log import web_logger from .streams import StreamReader -from .typedefs import Middleware +from .typedefs import Handler, Middleware from .web_exceptions import NotAppKeyWarning from .web_log import AccessLogger from .web_middlewares import _fix_request_current_app @@ -76,6 +76,18 @@ _T = TypeVar("_T") _U = TypeVar("_U") +_Resource = TypeVar("_Resource", bound=AbstractResource) + + +@lru_cache(None) +def _build_middlewares( + handler: Handler, apps: Tuple["Application", ...] +) -> Callable[[Request], Awaitable[StreamResponse]]: + """Apply middlewares to handler.""" + for app in apps[::-1]: + for m, _ in app._middlewares_handlers: # type: ignore[union-attr] + handler = update_wrapper(partial(m, handler=handler), handler) # type: ignore[misc] + return handler class Application(MutableMapping[Union[str, AppKey[Any]], Any]): @@ -88,6 +100,7 @@ class Application(MutableMapping[Union[str, AppKey[Any]], Any]): "_handler_args", "_middlewares", "_middlewares_handlers", + "_has_legacy_middlewares", "_run_middlewares", "_state", "_frozen", @@ -142,6 +155,7 @@ def __init__( self._middlewares_handlers: _MiddlewaresHandlers = None # initialized on freezing self._run_middlewares: Optional[bool] = None + self._has_legacy_middlewares: bool = True self._state: Dict[Union[AppKey[Any], str], object] = {} self._frozen = False @@ -183,12 +197,10 @@ def __eq__(self, other: object) -> bool: return self is other @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: - ... + def __getitem__(self, key: AppKey[_T]) -> _T: ... @overload - def __getitem__(self, key: str) -> Any: - ... + def __getitem__(self, key: str) -> Any: ... def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: return self._state[key] @@ -202,12 +214,10 @@ def _check_frozen(self) -> None: ) @overload # type: ignore[override] - def __setitem__(self, key: AppKey[_T], value: _T) -> None: - ... + def __setitem__(self, key: AppKey[_T], value: _T) -> None: ... @overload - def __setitem__(self, key: str, value: Any) -> None: - ... + def __setitem__(self, key: str, value: Any) -> None: ... 
def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: self._check_frozen() @@ -231,17 +241,17 @@ def __len__(self) -> int: def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: return iter(self._state) + def __hash__(self) -> int: + return id(self) + @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: - ... + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... @overload - def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: - ... + def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ... @overload - def get(self, key: str, default: Any = ...) -> Any: - ... + def get(self, key: str, default: Any = ...) -> Any: ... def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: return self._state.get(key, default) @@ -290,6 +300,9 @@ def pre_freeze(self) -> None: self._on_shutdown.freeze() self._on_cleanup.freeze() self._middlewares_handlers = tuple(self._prepare_middleware()) + self._has_legacy_middlewares = any( + not new_style for _, new_style in self._middlewares_handlers + ) # If current app and any subapp do not have middlewares avoid run all # of the code footprint that it implies, which have a middleware @@ -334,7 +347,7 @@ async def handler(app: "Application") -> None: reg_handler("on_shutdown") reg_handler("on_cleanup") - def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: + def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource: if not isinstance(prefix, str): raise TypeError("Prefix must be str") prefix = prefix.rstrip("/") @@ -344,8 +357,8 @@ def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: return self._add_subapp(factory, subapp) def _add_subapp( - self, resource_factory: Callable[[], AbstractResource], subapp: "Application" - ) -> AbstractResource: + self, resource_factory: Callable[[], _Resource], subapp: "Application" + ) -> _Resource: if self.frozen: raise RuntimeError("Cannot add sub application to frozen application") if subapp.frozen: @@ -359,7 +372,7 @@ def _add_subapp( subapp._set_loop(self._loop) return resource - def add_domain(self, domain: str, subapp: "Application") -> AbstractResource: + def add_domain(self, domain: str, subapp: "Application") -> MatchedSubAppResource: if not isinstance(domain, str): raise TypeError("Domain must be str") elif "*" in domain: @@ -520,29 +533,30 @@ async def _handle(self, request: Request) -> StreamResponse: match_info.freeze() - resp = None request._match_info = match_info - expect = request.headers.get(hdrs.EXPECT) - if expect: + + if request.headers.get(hdrs.EXPECT): resp = await match_info.expect_handler(request) await request.writer.drain() + if resp is not None: + return resp - if resp is None: - handler = match_info.handler + handler = match_info.handler - if self._run_middlewares: + if self._run_middlewares: + if not self._has_legacy_middlewares: + handler = _build_middlewares(handler, match_info.apps) + else: for app in match_info.apps[::-1]: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] if new_style: handler = update_wrapper( - partial(m, handler=handler), handler + partial(m, handler=handler), handler # type: ignore[misc] ) else: handler = await m(app, handler) # type: ignore[arg-type,assignment] - resp = await handler(request) - - return resp + return await handler(request) def __call__(self) -> "Application": """gunicorn compatibility""" @@ -585,7 +599,7 @@ async def _on_cleanup(self, app: 
Application) -> None: await it.__anext__() except StopAsyncIteration: pass - except Exception as exc: + except (Exception, asyncio.CancelledError) as exc: errors.append(exc) else: errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7dbe50f..f0de75e 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,7 +1,11 @@ import asyncio -import mimetypes import os import pathlib +import sys +from contextlib import suppress +from mimetypes import MimeTypes +from stat import S_ISREG +from types import MappingProxyType from typing import ( # noqa IO, TYPE_CHECKING, @@ -22,6 +26,8 @@ from .helpers import ETAG_ANY, ETag, must_be_empty_body from .typedefs import LooseHeaders, PathLike from .web_exceptions import ( + HTTPForbidden, + HTTPNotFound, HTTPNotModified, HTTPPartialContent, HTTPPreconditionFailed, @@ -40,6 +46,35 @@ NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) +CONTENT_TYPES: Final[MimeTypes] = MimeTypes() + +if sys.version_info < (3, 9): + CONTENT_TYPES.encodings_map[".br"] = "br" + +# File extension to IANA encodings map that will be checked in the order defined. +ENCODING_EXTENSIONS = MappingProxyType( + {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} +) + +FALLBACK_CONTENT_TYPE = "application/octet-stream" + +# Provide additional MIME type/extension pairs to be recognized. +# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only +ADDITIONAL_CONTENT_TYPES = MappingProxyType( + { + "application/gzip": ".gz", + "application/x-brotli": ".br", + "application/x-bzip2": ".bz2", + "application/x-compress": ".Z", + "application/x-xz": ".xz", + } +) + +# Add custom pairs and clear the encodings map so guess_type ignores them. +CONTENT_TYPES.encodings_map.clear() +for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): + CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] + class FileResponse(StreamResponse): """A response object can be used to send files.""" @@ -101,10 +136,12 @@ async def _sendfile( return writer @staticmethod - def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool: if len(etags) == 1 and etags[0].value == ETAG_ANY: return True - return any(etag.value == etag_value for etag in etags if not etag.is_weak) + return any( + etag.value == etag_value for etag in etags if weak or not etag.is_weak + ) async def _not_modified( self, request: "BaseRequest", etag_value: str, last_modified: float @@ -124,42 +161,60 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _get_file_path_stat_and_gzip( - self, check_for_gzipped_file: bool - ) -> Tuple[pathlib.Path, os.stat_result, bool]: - """Return the file path, stat result, and gzip status. + def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]: + """Return the file path, stat result, and encoding. + + If an uncompressed file is returned, the encoding is set to + :py:data:`None`. This method should be called from a thread executor since it calls os.stat which may block. 
""" - filepath = self._path - if check_for_gzipped_file: - gzip_path = filepath.with_name(filepath.name + ".gz") - try: - return gzip_path, gzip_path.stat(), True - except OSError: - # Fall through and try the non-gzipped file - pass + file_path = self._path + for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): + if file_encoding not in accept_encoding: + continue - return filepath, filepath.stat(), False + compressed_path = file_path.with_suffix(file_path.suffix + file_extension) + with suppress(OSError): + # Do not follow symlinks and ignore any non-regular files. + st = compressed_path.lstat() + if S_ISREG(st.st_mode): + return compressed_path, st, file_encoding + + # Fallback to the uncompressed file + return file_path, file_path.stat(), None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - check_for_gzipped_file = ( - "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - ) - filepath, st, gzip = await loop.run_in_executor( - None, self._get_file_path_stat_and_gzip, check_for_gzipped_file - ) + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + try: + file_path, st, file_encoding = await loop.run_in_executor( + None, self._get_file_path_stat_encoding, accept_encoding + ) + except OSError: + # Most likely to be FileNotFoundError or OSError for circular + # symlinks in python >= 3.13, so respond with 404. + self.set_status(HTTPNotFound.status_code) + return await super().prepare(request) + + # Forbid special files like sockets, pipes, devices, etc. + if not S_ISREG(st.st_mode): + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime - # https://tools.ietf.org/html/rfc7232#section-6 + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 ifmatch = request.if_match - if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + if ifmatch is not None and not self._etag_match( + etag_value, ifmatch, weak=False + ): return await self._precondition_failed(request) unmodsince = request.if_unmodified_since @@ -170,8 +225,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter ): return await self._precondition_failed(request) + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 ifnonematch = request.if_none_match - if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + if ifnonematch is not None and self._etag_match( + etag_value, ifnonematch, weak=True + ): return await self._not_modified(request, etag_value, last_modified) modsince = request.if_modified_since @@ -182,15 +240,6 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter ): return await self._not_modified(request, etag_value, last_modified) - if hdrs.CONTENT_TYPE not in self.headers: - ct, encoding = mimetypes.guess_type(str(filepath)) - if not ct: - ct = "application/octet-stream" - should_set_ct = True - else: - encoding = "gzip" if gzip else None - should_set_ct = False - status = self._status file_size = st.st_size count = file_size @@ -265,11 +314,16 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # return a HTTP 206 for a Range request. 
self.set_status(status) - if should_set_ct: - self.content_type = ct # type: ignore[assignment] - if encoding: - self.headers[hdrs.CONTENT_ENCODING] = encoding - if gzip: + # If the Content-Type header is not already set, guess it based on the + # extension of the request path. The encoding returned by guess_type + # can be ignored since the map was cleared above. + if hdrs.CONTENT_TYPE not in self.headers: + self.content_type = ( + CONTENT_TYPES.guess_type(self._path)[0] or FALLBACK_CONTENT_TYPE + ) + + if file_encoding: + self.headers[hdrs.CONTENT_ENCODING] = file_encoding self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING # Disable compression if we are already sending # a compressed file since we don't want to double @@ -293,7 +347,12 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter if count == 0 or must_be_empty_body(request.method, self.status): return await super().prepare(request) - fobj = await loop.run_in_executor(None, filepath.open, "rb") + try: + fobj = await loop.run_in_executor(None, file_path.open, "rb") + except PermissionError: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + if start: # be aware that start could be None or int=0 here. offset = start else: diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index 5da1533..2f1f5f5 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -110,7 +110,12 @@ async def impl(request: Request, handler: Handler) -> StreamResponse: def _fix_request_current_app(app: "Application") -> Middleware: @middleware async def impl(request: Request, handler: Handler) -> StreamResponse: - with request.match_info.set_current_app(app): + match_info = request.match_info + prev = match_info.current_app + match_info.current_app = app + try: return await handler(request) + finally: + match_info.current_app = prev return impl diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index f083b13..85eb70d 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -1,5 +1,6 @@ import asyncio import asyncio.streams +import sys import traceback import warnings from collections import deque @@ -26,7 +27,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout, set_exception +from .helpers import ceil_timeout from .http import ( HttpProcessingError, HttpRequestParser, @@ -37,7 +38,7 @@ from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD, StreamReader from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException +from .web_exceptions import HTTPException, HTTPInternalServerError from .web_log import AccessLogger from .web_request import BaseRequest from .web_response import Response, StreamResponse @@ -83,6 +84,9 @@ class PayloadAccessError(Exception): """Payload was accessed after response was sent.""" +_PAYLOAD_ACCESS_ERROR = PayloadAccessError() + + @attr.s(auto_attribs=True, frozen=True, slots=True) class _ErrInfo: status: int @@ -133,8 +137,6 @@ class RequestHandler(BaseProtocol): """ - KEEPALIVE_RESCHEDULE_DELAY = 1 - __slots__ = ( "_request_count", "_keepalive", @@ -142,12 +144,13 @@ class RequestHandler(BaseProtocol): "_request_handler", "_request_factory", "_tcp_keepalive", - "_keepalive_time", + "_next_keepalive_close_time", "_keepalive_handle", "_keepalive_timeout", "_lingering_time", "_messages", "_message_tail", + "_handler_waiter", "_waiter", "_task_handler", "_upgrade", @@ -162,6 +165,7 @@ 
class RequestHandler(BaseProtocol): "_force_close", "_current_request", "_timeout_ceil_threshold", + "_request_in_progress", ) def __init__( @@ -195,7 +199,7 @@ def __init__( self._tcp_keepalive = tcp_keepalive # placeholder to be replaced on keepalive timeout setup - self._keepalive_time = 0.0 + self._next_keepalive_close_time = 0.0 self._keepalive_handle: Optional[asyncio.Handle] = None self._keepalive_timeout = keepalive_timeout self._lingering_time = float(lingering_time) @@ -204,6 +208,7 @@ def __init__( self._message_tail = b"" self._waiter: Optional[asyncio.Future[None]] = None + self._handler_waiter: Optional[asyncio.Future[None]] = None self._task_handler: Optional[asyncio.Task[None]] = None self._upgrade = False @@ -237,6 +242,7 @@ def __init__( self._close = False self._force_close = False + self._request_in_progress = False def __repr__(self) -> str: return "<{} {}>".format( @@ -259,25 +265,44 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._keepalive_handle is not None: self._keepalive_handle.cancel() - if self._waiter: - self._waiter.cancel() - - # wait for handlers - with suppress(asyncio.CancelledError, asyncio.TimeoutError): + # Wait for graceful handler completion + if self._request_in_progress: + # The future is only created when we are shutting + # down while the handler is still processing a request + # to avoid creating a future for every request. + self._handler_waiter = self._loop.create_future() + try: + async with ceil_timeout(timeout): + await self._handler_waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._handler_waiter = None + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + # Then cancel handler and wait + try: async with ceil_timeout(timeout): if self._current_request is not None: self._current_request._cancel(asyncio.CancelledError()) if self._task_handler is not None and not self._task_handler.done(): - await self._task_handler + await asyncio.shield(self._task_handler) + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise # force-close non-idle handler if self._task_handler is not None: self._task_handler.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None + self.force_close() def connection_made(self, transport: asyncio.BaseTransport) -> None: super().connection_made(transport) @@ -286,22 +311,27 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: if self._tcp_keepalive: tcp_keepalive(real_transport) - self._task_handler = self._loop.create_task(self.start()) assert self._manager is not None self._manager.connection_made(self, real_transport) + loop = self._loop + if sys.version_info >= (3, 12): + task = asyncio.Task(self.start(), loop=loop, eager_start=True) + else: + task = loop.create_task(self.start()) + self._task_handler = task + def connection_lost(self, exc: Optional[BaseException]) -> None: if self._manager is None: return self._manager.connection_lost(self, exc) - super().connection_lost(exc) - # Grab value before setting _manager to None. 
handler_cancellation = self._manager.handler_cancellation + self.force_close() + super().connection_lost(exc) self._manager = None - self._force_close = True self._request_factory = None self._request_handler = None self._request_parser = None @@ -314,9 +344,6 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: exc = ConnectionResetError("Connection lost") self._current_request._cancel(exc) - if self._waiter is not None: - self._waiter.cancel() - if handler_cancellation and self._task_handler is not None: self._task_handler.cancel() @@ -421,23 +448,21 @@ def log_exception(self, *args: Any, **kw: Any) -> None: self.logger.exception(*args, **kw) def _process_keepalive(self) -> None: + self._keepalive_handle = None if self._force_close or not self._keepalive: return - next = self._keepalive_time + self._keepalive_timeout + loop = self._loop + now = loop.time() + close_time = self._next_keepalive_close_time + if now <= close_time: + # Keep alive close check fired too early, reschedule + self._keepalive_handle = loop.call_at(close_time, self._process_keepalive) + return # handler in idle state - if self._waiter: - if self._loop.time() > next: - self.force_close() - return - - # not all request handlers are done, - # reschedule itself to next second - self._keepalive_handle = self._loop.call_later( - self.KEEPALIVE_RESCHEDULE_DELAY, - self._process_keepalive, - ) + if self._waiter and not self._waiter.done(): + self.force_close() async def _handle_request( self, @@ -445,7 +470,7 @@ async def _handle_request( start_time: float, request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: - assert self._request_handler is not None + self._request_in_progress = True try: try: self._current_request = request @@ -454,16 +479,16 @@ async def _handle_request( self._current_request = None except HTTPException as exc: resp = exc - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except asyncio.CancelledError: raise except asyncio.TimeoutError as exc: self.log_debug("Request handler timed out.", exc_info=exc) resp = self.handle_error(request, 504) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except Exception as exc: resp = self.handle_error(request, 500, exc) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) else: # Deprecation warning (See #2415) if getattr(resp, "__http_exception__", False): @@ -474,7 +499,11 @@ async def _handle_request( DeprecationWarning, ) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) + finally: + self._request_in_progress = False + if self._handler_waiter is not None: + self._handler_waiter.set_result(None) return resp, reset @@ -488,7 +517,7 @@ async def start(self) -> None: keep_alive(True) specified. 
""" loop = self._loop - handler = self._task_handler + handler = asyncio.current_task(loop) assert handler is not None manager = self._manager assert manager is not None @@ -503,8 +532,6 @@ async def start(self) -> None: # wait for next request self._waiter = loop.create_future() await self._waiter - except asyncio.CancelledError: - break finally: self._waiter = None @@ -524,12 +551,14 @@ async def start(self) -> None: request = self._request_factory(message, payload, self, writer, handler) try: # a new task is used for copy context vars (#3406) - task = self._loop.create_task( - self._handle_request(request, start, request_handler) - ) + coro = self._handle_request(request, start, request_handler) + if sys.version_info >= (3, 12): + task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + task = loop.create_task(coro) try: resp, reset = await task - except (asyncio.CancelledError, ConnectionError): + except ConnectionError: self.log_debug("Ignored premature client disconnection") break @@ -553,27 +582,30 @@ async def start(self) -> None: now = loop.time() end_t = now + lingering_time - with suppress(asyncio.TimeoutError, asyncio.CancelledError): + try: while not payload.is_eof() and now < end_t: async with ceil_timeout(end_t - now): # read and ignore await payload.readany() now = loop.time() + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (t := asyncio.current_task()) + and t.cancelling() + ): + raise # if payload still uncompleted if not payload.is_eof() and not self._force_close: self.log_debug("Uncompleted request.") self.close() - set_exception(payload, PayloadAccessError()) + payload.set_exception(_PAYLOAD_ACCESS_ERROR) except asyncio.CancelledError: - self.log_debug("Ignored premature client disconnection ") - break - except RuntimeError as exc: - if self.debug: - self.log_exception("Unhandled runtime exception", exc_info=exc) - self.force_close() + self.log_debug("Ignored premature client disconnection") + raise except Exception as exc: self.log_exception("Unhandled exception", exc_info=exc) self.force_close() @@ -584,11 +616,12 @@ async def start(self) -> None: if self._keepalive and not self._close: # start keep-alive timer if keepalive_timeout is not None: - now = self._loop.time() - self._keepalive_time = now + now = loop.time() + close_time = now + keepalive_timeout + self._next_keepalive_close_time = close_time if self._keepalive_handle is None: self._keepalive_handle = loop.call_at( - now + keepalive_timeout, self._process_keepalive + close_time, self._process_keepalive ) else: break @@ -601,7 +634,7 @@ async def start(self) -> None: async def finish_response( self, request: BaseRequest, resp: StreamResponse, start_time: float - ) -> bool: + ) -> Tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. This has to @@ -609,6 +642,7 @@ async def finish_response( can get exception information. Returns True if the client disconnects prematurely. 
""" + request._finish() if self._request_parser is not None: self._request_parser.set_upgraded(False) self._upgrade = False @@ -619,22 +653,26 @@ async def finish_response( prepare_meth = resp.prepare except AttributeError: if resp is None: - raise RuntimeError("Missing return " "statement on request handler") + self.log_exception("Missing return statement on request handler") else: - raise RuntimeError( - "Web-handler should return " - "a response instance, " + self.log_exception( + "Web-handler should return a response instance, " "got {!r}".format(resp) ) + exc = HTTPInternalServerError() + resp = Response( + status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers + ) + prepare_meth = resp.prepare try: await prepare_meth(request) await resp.write_eof() except ConnectionError: self.log_access(request, resp, start_time) - return True - else: - self.log_access(request, resp, start_time) - return False + return resp, True + + self.log_access(request, resp, start_time) + return resp, False def handle_error( self, diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 4bc670a..62a08ea 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -79,7 +79,7 @@ class FileField: filename: str file: io.BufferedReader content_type: str - headers: "CIMultiDictProxy[str]" + headers: CIMultiDictProxy[str] _TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" @@ -99,10 +99,10 @@ class FileField: qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR ) -_FORWARDED_PAIR: Final[ - str -] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( - token=_TOKEN, quoted_string=_QUOTED_STRING +_FORWARDED_PAIR: Final[str] = ( + r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING + ) ) _QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") @@ -169,12 +169,16 @@ def __init__( self._payload_writer = payload_writer self._payload = payload - self._headers = message.headers + self._headers: CIMultiDictProxy[str] = message.headers self._method = message.method self._version = message.version self._cache: Dict[str, Any] = {} url = message.url - if url.is_absolute(): + if url.absolute: + if scheme is not None: + url = url.with_scheme(scheme) + if host is not None: + url = url.with_host(host) # absolute URL is given, # override auto-calculating url, host, and scheme # all other properties should be good @@ -184,6 +188,10 @@ def __init__( self._rel_url = url.relative() else: self._rel_url = message.url + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None self._read_bytes: Optional[bytes] = None @@ -197,10 +205,6 @@ def __init__( self._transport_sslcontext = transport.get_extra_info("sslcontext") self._transport_peername = transport.get_extra_info("peername") - if scheme is not None: - self._cache["scheme"] = scheme - if host is not None: - self._cache["host"] = host if remote is not None: self._cache["remote"] = remote @@ -235,7 +239,8 @@ def clone( # a copy semantic dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) dct["raw_headers"] = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + (k.encode("utf-8"), v.encode("utf-8")) + for k, v in dct["headers"].items() ) message = self._message._replace(**dct) @@ -426,6 +431,10 @@ def host(self) -> str: - overridden value by .clone(host=new_host) call. 
- HOST HTTP header - socket.getfqdn() value + + For example, 'example.com' or 'localhost:8080'. + + For historical reasons, the port number may be included. """ host = self._message.headers.get(hdrs.HOST) if host is not None: @@ -449,8 +458,10 @@ def remote(self) -> Optional[str]: @reify def url(self) -> URL: - url = URL.build(scheme=self.scheme, host=self.host) - return url.join(self._rel_url) + """The full URL of the request.""" + # authority is used here because it may include the port number + # and we want yarl to parse it correctly + return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url) @reify def path(self) -> str: @@ -481,7 +492,7 @@ def raw_path(self) -> str: @reify def query(self) -> "MultiMapping[str]": """A multidict with all the variables in the query string.""" - return MultiDictProxy(self._rel_url.query) + return self._rel_url.query @reify def query_string(self) -> str: @@ -492,7 +503,7 @@ def query_string(self) -> str: return self._rel_url.query_string @reify - def headers(self) -> "MultiMapping[str]": + def headers(self) -> CIMultiDictProxy[str]: """A case-insensitive multidict proxy with all headers.""" return self._headers @@ -819,6 +830,18 @@ async def _prepare_hook(self, response: StreamResponse) -> None: def _cancel(self, exc: BaseException) -> None: set_exception(self._payload, exc) + def _finish(self) -> None: + if self._post is None or self.content_type != "multipart/form-data": + return + + # NOTE: Release file descriptors for the + # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` + # NOTE: instances of files sent within multipart request body + # NOTE: via HTTP POST request. + for file_name, file_field_object in self._post.items(): + if isinstance(file_field_object, FileField): + file_field_object.file.close() + class Request(BaseRequest): @@ -898,4 +921,5 @@ async def _prepare_hook(self, response: StreamResponse) -> None: if match_info is None: return for app in match_info._apps: - await app.on_response_prepare.send(self, response) + if on_response_prepare := app.on_response_prepare: + await on_response_prepare.send(self, response) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 40d6f01..2036a8d 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -41,6 +41,8 @@ from .payload import Payload from .typedefs import JSONEncoder, LooseHeaders +REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} + __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") @@ -52,6 +54,7 @@ BaseClass = collections.abc.MutableMapping +# TODO(py311): Convert to StrEnum for wider use class ContentCoding(enum.Enum): # The content codings that we have support for. 
# @@ -62,6 +65,8 @@ class ContentCoding(enum.Enum): identity = "identity" +CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding} + ############################################################ # HTTP Response classes ############################################################ @@ -71,6 +76,8 @@ class StreamResponse(BaseClass, HeadersMixin): _length_check = True + _body: Union[None, bytes, bytearray, Payload] + def __init__( self, *, @@ -97,11 +104,11 @@ def __init__( else: self._headers = CIMultiDict() - self.set_status(status, reason) + self._set_status(status, reason) @property def prepared(self) -> bool: - return self._payload_writer is not None + return self._eof_sent or self._payload_writer is not None @property def task(self) -> "Optional[asyncio.Task[None]]": @@ -131,15 +138,17 @@ def set_status( status: int, reason: Optional[str] = None, ) -> None: - assert not self.prepared, ( - "Cannot change the response status code after " "the headers have been sent" - ) + assert ( + not self.prepared + ), "Cannot change the response status code after the headers have been sent" + self._set_status(status, reason) + + def _set_status(self, status: int, reason: Optional[str]) -> None: self._status = int(status) if reason is None: - try: - reason = HTTPStatus(self._status).phrase - except ValueError: - reason = "" + reason = REASON_PHRASES.get(self._status, "") + elif "\n" in reason: + raise ValueError("Reason cannot contain \\n") self._reason = reason @property @@ -175,7 +184,7 @@ def enable_compression( ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. - if type(force) == bool: + if isinstance(force, bool): force = ContentCoding.deflate if force else ContentCoding.identity warnings.warn( "Using boolean for force is deprecated #3318", DeprecationWarning @@ -403,8 +412,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for coding in ContentCoding: - if coding.value in accept_encoding: + for value, coding in CONTENT_CODINGS.items(): + if value in accept_encoding: await self._do_start_compression(coding) return @@ -478,7 +487,8 @@ async def _prepare_headers(self) -> None: # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 if hdrs.TRANSFER_ENCODING in headers: del headers[hdrs.TRANSFER_ENCODING] - else: + elif self.content_length != 0: + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) @@ -499,9 +509,7 @@ async def _write_headers(self) -> None: assert writer is not None # status line version = request.version - status_line = "HTTP/{}.{} {} {}".format( - version[0], version[1], self._status, self._reason - ) + status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) async def write(self, data: bytes) -> None: @@ -650,21 +658,17 @@ def body(self) -> Optional[Union[bytes, Payload]]: return self._body @body.setter - def body(self, body: bytes) -> None: + def body(self, body: Any) -> None: if body is None: - self._body: Optional[bytes] = None - self._body_payload: bool = False + self._body = None elif isinstance(body, (bytes, bytearray)): self._body = body - 
self._body_payload = False else: try: self._body = body = payload.PAYLOAD_REGISTRY.get(body) except payload.LookupError: raise ValueError("Unsupported body type %r" % type(body)) - self._body_payload = True - headers = self._headers # set content-type @@ -673,7 +677,7 @@ def body(self, body: bytes) -> None: # copy payload headers if body.headers: - for (key, value) in body.headers.items(): + for key, value in body.headers.items(): if key not in headers: headers[key] = value @@ -697,7 +701,6 @@ def text(self, text: str) -> None: self.charset = "utf-8" self._body = text.encode(self.charset) - self._body_payload = False self._compressed_body = None @property @@ -706,12 +709,12 @@ def content_length(self) -> Optional[int]: return None if hdrs.CONTENT_LENGTH in self._headers: - return super().content_length + return int(self._headers[hdrs.CONTENT_LENGTH]) if self._compressed_body is not None: # Return length of the compressed body return len(self._compressed_body) - elif self._body_payload: + elif isinstance(self._body, Payload): # A payload without content length, or a compressed payload return None elif self._body is not None: @@ -736,9 +739,8 @@ async def write_eof(self, data: bytes = b"") -> None: if body is not None: if self._must_be_empty_body: await super().write_eof() - elif self._body_payload: - payload = cast(Payload, body) - await payload.write(self._payload_writer) + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) await super().write_eof() else: await super().write_eof(cast(bytes, body)) @@ -746,14 +748,13 @@ async def write_eof(self, data: bytes = b"") -> None: await super().write_eof() async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - if should_remove_content_length(request.method, self.status): - if hdrs.CONTENT_LENGTH in self._headers: + if hdrs.CONTENT_LENGTH in self._headers: + if should_remove_content_length(request.method, self.status): del self._headers[hdrs.CONTENT_LENGTH] - elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: - if self._body_payload: - size = cast(Payload, self._body).size - if size is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(size) + elif not self._chunked: + if isinstance(self._body, Payload): + if self._body.size is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) else: body_len = len(self._body) if self._body else "0" # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 @@ -765,7 +766,7 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: return await super()._start(request) async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._body_payload or self._chunked: + if self._chunked or isinstance(self._body, Payload): return await super()._do_start_compression(coding) if coding != ContentCoding.identity: diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index d79cd32..9380214 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -162,12 +162,10 @@ def __repr__(self) -> str: return f"" @overload - def __getitem__(self, index: int) -> AbstractRouteDef: - ... + def __getitem__(self, index: int) -> AbstractRouteDef: ... @overload - def __getitem__(self, index: slice) -> List[AbstractRouteDef]: - ... + def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... 
def __getitem__(self, index): # type: ignore[no-untyped-def] return self._items[index] diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 19a4441..0a237ed 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -3,7 +3,7 @@ import socket import warnings from abc import ABC, abstractmethod -from typing import Any, Awaitable, Callable, List, Optional, Set +from typing import Any, List, Optional, Set from yarl import URL @@ -108,7 +108,7 @@ def __init__( @property def name(self) -> str: scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if self._host is None else self._host + host = "0.0.0.0" if not self._host else self._host return str(URL.build(scheme=scheme, host=host, port=self._port)) async def start(self) -> None: @@ -238,14 +238,7 @@ async def start(self) -> None: class BaseRunner(ABC): - __slots__ = ( - "shutdown_callback", - "_handle_signals", - "_kwargs", - "_server", - "_sites", - "_shutdown_timeout", - ) + __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout") def __init__( self, @@ -254,7 +247,6 @@ def __init__( shutdown_timeout: float = 60.0, **kwargs: Any, ) -> None: - self.shutdown_callback: Optional[Callable[[], Awaitable[None]]] = None self._handle_signals = handle_signals self._kwargs = kwargs self._server: Optional[Server] = None @@ -312,10 +304,6 @@ async def cleanup(self) -> None: await asyncio.sleep(0) self._server.pre_shutdown() await self.shutdown() - - if self.shutdown_callback: - await self.shutdown_callback() - await self._server.shutdown(self._shutdown_timeout) await self._cleanup_server() diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index 52faacb..ffc198d 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -1,9 +1,9 @@ """Low level HTTP server.""" + import asyncio from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa from .abc import AbstractStreamWriter -from .helpers import get_running_loop from .http_parser import RawRequestMessage from .streams import StreamReader from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler @@ -22,7 +22,7 @@ def __init__( loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any ) -> None: - self._loop = get_running_loop(loop) + self._loop = loop or asyncio.get_running_loop() self._connections: Dict[RequestHandler, asyncio.Transport] = {} self._kwargs = kwargs self.requests_count = 0 @@ -43,7 +43,12 @@ def connection_lost( self, handler: RequestHandler, exc: Optional[BaseException] = None ) -> None: if handler in self._connections: - del self._connections[handler] + if handler._task_handler: + handler._task_handler.add_done_callback( + lambda f: self._connections.pop(handler, None) + ) + else: + del self._connections[handler] def _make_request( self, diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 954291f..89abdc4 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -8,8 +8,8 @@ import keyword import os import re +import sys import warnings -from contextlib import contextmanager from functools import wraps from pathlib import Path from types import MappingProxyType @@ -38,7 +38,7 @@ cast, ) -from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] +from yarl import URL, __version__ as yarl_version from . 
import hdrs from .abc import AbstractMatchInfo, AbstractRouter, AbstractView @@ -78,6 +78,12 @@ else: BaseDict = dict +CIRCULAR_SYMLINK_ERROR = ( + (OSError,) + if sys.version_info < (3, 10) and sys.platform.startswith("win32") + else (RuntimeError,) if sys.version_info < (3, 13) else () +) + YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( @@ -199,7 +205,7 @@ def __init__( @wraps(handler) async def handler_wrapper(request: Request) -> StreamResponse: - result = old_handler(request) + result = old_handler(request) # type: ignore[call-arg] if asyncio.iscoroutine(result): result = await result assert isinstance(result, StreamResponse) @@ -286,8 +292,8 @@ def current_app(self) -> "Application": assert app is not None return app - @contextmanager - def set_current_app(self, app: "Application") -> Generator[None, None, None]: + @current_app.setter + def current_app(self, app: "Application") -> None: if DEBUG: # pragma: no cover if app not in self._apps: raise RuntimeError( @@ -295,12 +301,7 @@ def set_current_app(self, app: "Application") -> Generator[None, None, None]: self._apps, app ) ) - prev = self._current_app self._current_app = app - try: - yield - finally: - self._current_app = prev def freeze(self) -> None: self._frozen = True @@ -334,6 +335,8 @@ async def _default_expect_handler(request: Request) -> None: if request.version == HttpVersion11: if expect.lower() == "100-continue": await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + # Reset output_size as we haven't started the main body yet. + request.writer.output_size = 0 else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) @@ -372,7 +375,7 @@ def register_route(self, route: "ResourceRoute") -> None: async def resolve(self, request: Request) -> _Resolve: allowed_methods: Set[str] = set() - match_dict = self._match(request.rel_url.raw_path) + match_dict = self._match(request.rel_url.path_safe) if match_dict is None: return None, allowed_methods @@ -422,8 +425,7 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: # string comparison is about 10 times faster than regexp matching if self._path == path: return {} - else: - return None + return None def raw_match(self, path: str) -> bool: return self._path == path @@ -447,6 +449,7 @@ class DynamicResource(Resource): def __init__(self, path: str, *, name: Optional[str] = None) -> None: super().__init__(name=name) + self._orig_path = path pattern = "" formatter = "" for part in ROUTE_RE.split(path): @@ -493,13 +496,12 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: match = self._pattern.fullmatch(path) if match is None: return None - else: - return { - key: _unquote_path(value) for key, value in match.groupdict().items() - } + return { + key: _unquote_path_safe(value) for key, value in match.groupdict().items() + } def raw_match(self, path: str) -> bool: - return self._formatter == path + return self._orig_path == path def get_info(self) -> _InfoDict: return {"formatter": self._formatter, "pattern": self._pattern} @@ -557,14 +559,11 @@ def __init__( ) -> None: super().__init__(prefix, name=name) try: - directory = Path(directory) - if str(directory).startswith("~"): - directory = Path(os.path.expanduser(str(directory))) - directory = directory.resolve() - if not directory.is_dir(): - raise ValueError("Not a directory") - except (FileNotFoundError, ValueError) as error: - raise ValueError(f"No directory exists at '{directory}'") from error + directory = 
Path(directory).expanduser().resolve(strict=True) + except FileNotFoundError as error: + raise ValueError(f"'{directory}' does not exist") from error + if not directory.is_dir(): + raise ValueError(f"'{directory}' is not a directory") self._directory = directory self._show_index = show_index self._chunk_size = chunk_size @@ -644,7 +643,7 @@ def set_options_route(self, handler: Handler) -> None: ) async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.raw_path + path = request.rel_url.path_safe method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: @@ -653,7 +652,7 @@ async def resolve(self, request: Request) -> _Resolve: if method not in allowed_methods: return None, allowed_methods - match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])} return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) def __len__(self) -> int: @@ -664,59 +663,64 @@ def __iter__(self) -> Iterator[AbstractRoute]: async def _handle(self, request: Request) -> StreamResponse: rel_url = request.match_info["filename"] + filename = Path(rel_url) + if filename.anchor: + # rel_url is an absolute name like + # /static/\\machine_name\c$ or /static/D:\path + # where the static dir is totally different + raise HTTPForbidden() + + unresolved_path = self._directory.joinpath(filename) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, self._resolve_path_to_response, unresolved_path + ) + + def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: + """Take the unresolved path and query the file system to form a response.""" + # Check for access outside the root directory. For follow symlinks, URI + # cannot traverse out, but symlinks can. Otherwise, no access outside + # root is permitted. try: - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden() - unresolved_path = self._directory.joinpath(filename) if self._follow_symlinks: normalized_path = Path(os.path.normpath(unresolved_path)) normalized_path.relative_to(self._directory) - filepath = normalized_path.resolve() + file_path = normalized_path.resolve() else: - filepath = unresolved_path.resolve() - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError) as error: - # relatively safe - raise HTTPNotFound() from error - except HTTPForbidden: - raise - except Exception as error: - # perm error or other kind! - request.app.logger.exception(error) + file_path = unresolved_path.resolve() + file_path.relative_to(self._directory) + except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error: + # ValueError is raised for the relative check. Circular symlinks + # raise here on resolving for python < 3.13. raise HTTPNotFound() from error - # on opening a dir, load its contents if allowed - if filepath.is_dir(): - if self._show_index: - try: + # if path is a directory, return the contents if permitted. Note the + # directory check will raise if a segment is not readable. 
+ try: + if file_path.is_dir(): + if self._show_index: return Response( - text=self._directory_as_html(filepath), content_type="text/html" + text=self._directory_as_html(file_path), + content_type="text/html", ) - except PermissionError: + else: raise HTTPForbidden() - else: - raise HTTPForbidden() - elif filepath.is_file(): - return FileResponse(filepath, chunk_size=self._chunk_size) - else: - raise HTTPNotFound + except PermissionError as error: + raise HTTPForbidden() from error - def _directory_as_html(self, filepath: Path) -> str: - # returns directory's index as html + # Return the file response, which handles all other checks. + return FileResponse(file_path, chunk_size=self._chunk_size) - # sanity check - assert filepath.is_dir() + def _directory_as_html(self, dir_path: Path) -> str: + """returns directory's index as html.""" + assert dir_path.is_dir() - relative_path_to_dir = filepath.relative_to(self._directory).as_posix() + relative_path_to_dir = dir_path.relative_to(self._directory).as_posix() index_of = f"Index of /{html_escape(relative_path_to_dir)}" h1 = f"

<h1>{index_of}</h1>

" index_list = [] - dir_index = filepath.iterdir() + dir_index = dir_path.iterdir() for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() @@ -750,13 +754,20 @@ class PrefixedSubAppResource(PrefixResource): def __init__(self, prefix: str, app: "Application") -> None: super().__init__(prefix) self._app = app - for resource in app.router.resources(): - resource.add_prefix(prefix) + self._add_prefix_to_resources(prefix) def add_prefix(self, prefix: str) -> None: super().add_prefix(prefix) - for resource in self._app.router.resources(): + self._add_prefix_to_resources(prefix) + + def _add_prefix_to_resources(self, prefix: str) -> None: + router = self._app.router + for resource in router.resources(): + # Since the canonical path of a resource is about + # to change, we need to unindex it and then reindex + router.unindex_resource(resource) resource.add_prefix(prefix) + router.index_resource(resource) def url_for(self, *args: str, **kwargs: str) -> URL: raise RuntimeError(".url_for() is not supported " "by sub-application root") @@ -765,11 +776,6 @@ def get_info(self) -> _InfoDict: return {"app": self._app, "prefix": self._prefix} async def resolve(self, request: Request) -> _Resolve: - if ( - not request.url.raw_path.startswith(self._prefix2) - and request.url.raw_path != self._prefix - ): - return None, set() match_info = await self._app.router.resolve(request) match_info.add_app(self._app) if isinstance(match_info.http_exception, HTTPMethodNotAllowed): @@ -1015,12 +1021,39 @@ def __init__(self) -> None: super().__init__() self._resources: List[AbstractResource] = [] self._named_resources: Dict[str, AbstractResource] = {} + self._resource_index: dict[str, list[AbstractResource]] = {} + self._matched_sub_app_resources: List[MatchedSubAppResource] = [] async def resolve(self, request: Request) -> UrlMappingMatchInfo: - method = request.method + resource_index = self._resource_index allowed_methods: Set[str] = set() - for resource in self._resources: + # Walk the url parts looking for candidates. We walk the url backwards + # to ensure the most explicit match is found first. If there are multiple + # candidates for a given url part because there are multiple resources + # registered for the same canonical path, we resolve them in a linear + # fashion to ensure registration order is respected. + url_part = request.rel_url.path_safe + while url_part: + for candidate in resource_index.get(url_part, ()): + match_dict, allowed = await candidate.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + if url_part == "/": + break + url_part = url_part.rpartition("/")[0] or "/" + + # + # We didn't find any candidates, so we'll try the matched sub-app + # resources which we have to walk in a linear fashion because they + # have regex/wildcard match rules and we cannot index them. 
+ # + # For most cases we do not expect there to be many of these since + # currently they are only added by `add_domain` + # + for resource in self._matched_sub_app_resources: match_dict, allowed = await resource.resolve(request) if match_dict is not None: return match_dict @@ -1028,9 +1061,9 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: allowed_methods |= allowed if allowed_methods: - return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) - else: - return MatchInfoError(HTTPNotFound()) + return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) + + return MatchInfoError(HTTPNotFound()) def __iter__(self) -> Iterator[str]: return iter(self._named_resources) @@ -1086,6 +1119,36 @@ def register_resource(self, resource: AbstractResource) -> None: self._named_resources[name] = resource self._resources.append(resource) + if isinstance(resource, MatchedSubAppResource): + # We cannot index match sub-app resources because they have match rules + self._matched_sub_app_resources.append(resource) + else: + self.index_resource(resource) + + def _get_resource_index_key(self, resource: AbstractResource) -> str: + """Return a key to index the resource in the resource index.""" + if "{" in (index_key := resource.canonical): + # strip at the first { to allow for variables, and than + # rpartition at / to allow for variable parts in the path + # For example if the canonical path is `/core/locations{tail:.*}` + # the index key will be `/core` since index is based on the + # url parts split by `/` + index_key = index_key.partition("{")[0].rpartition("/")[0] + return index_key.rstrip("/") or "/" + + def index_resource(self, resource: AbstractResource) -> None: + """Add a resource to the resource index.""" + resource_key = self._get_resource_index_key(resource) + # There may be multiple resources for a canonical path + # so we keep them in a list to ensure that registration + # order is respected. + self._resource_index.setdefault(resource_key, []).append(resource) + + def unindex_resource(self, resource: AbstractResource) -> None: + """Remove a resource from the resource index.""" + resource_key = self._get_resource_index_key(resource) + self._resource_index[resource_key].remove(resource) + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: if path and not path.startswith("/"): raise ValueError("path should be started with / or be empty") @@ -1095,7 +1158,7 @@ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: if resource.name == name and resource.raw_match(path): return cast(Resource, resource) if not ("{" in path or "}" in path or ROUTE_RE.search(path)): - resource = PlainResource(_requote_path(path), name=name) + resource = PlainResource(path, name=name) self.register_resource(resource) return resource resource = DynamicResource(path, name=name) @@ -1221,8 +1284,10 @@ def _quote_path(value: str) -> str: return URL.build(path=value, encoded=False).raw_path -def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path +def _unquote_path_safe(value: str) -> str: + if "%" not in value: + return value + return value.replace("%2F", "/").replace("%25", "%") def _requote_path(value: str) -> str: diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 9fe6652..bf35f3b 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . 
import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_exception, set_result +from .helpers import calculate_timeout_when, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -81,67 +81,119 @@ def __init__( self._conn_lost = 0 self._close_code: Optional[int] = None self._loop: Optional[asyncio.AbstractEventLoop] = None - self._waiting: Optional[asyncio.Future[bool]] = None + self._waiting: bool = False + self._close_wait: Optional[asyncio.Future[None]] = None self._exception: Optional[BaseException] = None self._timeout = timeout self._receive_timeout = receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat + self._heartbeat_when = 0.0 self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._compress = compress self._max_msg_size = max_msg_size + self._ping_task: Optional[asyncio.Task[None]] = None def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - + self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None - if self._heartbeat is not None: - assert self._loop is not None - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5, - ) + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + loop = self._loop + assert loop is not None + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - assert self._loop is not None - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. 
- self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5, + self._heartbeat_cb = None + loop = self._loop + assert loop is not None and self._writer is not None + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat ) + return + + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) + + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. + ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + else: + self._ping_task_done(ping_task) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + if not task.cancelled() and (exc := task.exception()): + self._handle_ping_pong_exception(exc) + self._ping_task = None def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._closed = True - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = asyncio.TimeoutError() + self._handle_ping_pong_exception(asyncio.TimeoutError()) + + def _handle_ping_pong_exception(self, exc: BaseException) -> None: + """Handle exceptions raised during ping/pong processing.""" + if self._closed: + return + self._set_closed() + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = exc + if self._waiting and not self._closing and self._reader is not None: + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) + + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. 
+ """ + self._closed = True + self._cancel_heartbeat() async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: # make pre-check to don't hide it by do_handshake() exceptions @@ -327,14 +379,14 @@ async def pong(self, message: bytes = b"") -> None: raise RuntimeError("Call .prepare() first") await self._writer.pong(message) - async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + async def send_str(self, data: str, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, str): raise TypeError("data argument must be str (%r)" % type(data)) await self._writer.send(data, binary=False, compress=compress) - async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, (bytes, bytearray, memoryview)): @@ -344,7 +396,7 @@ async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None async def send_json( self, data: Any, - compress: Optional[bool] = None, + compress: Optional[int] = None, *, dumps: JSONEncoder = json.dumps, ) -> None: @@ -366,20 +418,10 @@ async def close( if self._writer is None: raise RuntimeError("Call .prepare() first") - self._cancel_heartbeat() - reader = self._reader - assert reader is not None - - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting is not None and not self._closed: - reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting - if self._closed: return False + self._set_closed() - self._closed = True try: await self._writer.close(code, message) writer = self._payload_writer @@ -394,12 +436,21 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True + reader = self._reader + assert reader is not None + # we need to break `receive()` cycle before we can call + # `reader.read()` as `close()` may be called from different task + if self._waiting: + assert self._loop is not None + assert self._close_wait is None + self._close_wait = self._loop.create_future() + reader.feed_data(WS_CLOSING_MESSAGE) + await self._close_wait + if self._closing: self._close_transport() return True - reader = self._reader - assert reader is not None try: async with async_timeout.timeout(self._timeout): msg = await reader.read() @@ -411,7 +462,7 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: self._set_code_close_transport(msg.data) return True @@ -423,6 +474,7 @@ def _set_closing(self, code: WSCloseCode) -> None: """Set the close code and mark the connection as closing.""" self._closing = True self._close_code = code + self._cancel_heartbeat() def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" @@ -440,8 +492,9 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: loop = self._loop assert loop is not None + receive_timeout = timeout or self._receive_timeout while True: - if self._waiting is not None: + if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") if self._closed: @@ -453,15 +506,22 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WS_CLOSING_MESSAGE try: - self._waiting = loop.create_future() + 
self._waiting = True try: - async with async_timeout.timeout(timeout or self._receive_timeout): + if receive_timeout: + # Entering the context manager and creating + # Timeout() object can take almost 50% of the + # run time in this loop so we avoid it if + # there is no read timeout. + async with async_timeout.timeout(receive_timeout): + msg = await self._reader.read() + else: msg = await self._reader.read() self._reset_heartbeat() finally: - waiter = self._waiting - set_result(waiter, True) - self._waiting = None + self._waiting = False + if self._close_wait: + set_result(self._close_wait, None) except asyncio.TimeoutError: raise except EofStream: @@ -478,7 +538,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.ERROR, exc, None) - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: self._set_closing(msg.data) # Could be closed while awaiting reader. if not self._closed and self._autoclose: @@ -487,19 +547,19 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: # want to drain any pending writes as it will # likely result writing to a broken pipe. await self.close(drain=False) - elif msg.type == WSMsgType.CLOSING: + elif msg.type is WSMsgType.CLOSING: self._set_closing(WSCloseCode.OK) - elif msg.type == WSMsgType.PING and self._autoping: + elif msg.type is WSMsgType.PING and self._autoping: await self.pong(msg.data) continue - elif msg.type == WSMsgType.PONG and self._autoping: + elif msg.type is WSMsgType.PONG and self._autoping: continue return msg async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: + if msg.type is not WSMsgType.TEXT: raise TypeError( "Received message {}:{!r} is not WSMsgType.TEXT".format( msg.type, msg.data @@ -509,7 +569,7 @@ async def receive_str(self, *, timeout: Optional[float] = None) -> str: async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: + if msg.type is not WSMsgType.BINARY: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") return cast(bytes, msg.data) @@ -535,5 +595,6 @@ def _cancel(self, exc: BaseException) -> None: # web_protocol calls this from connection_lost # or when the server is shutting down. self._closing = True + self._cancel_heartbeat() if self._reader is not None: set_exception(self._reader, exc) diff --git a/debian/.gitignore b/debian/.gitignore new file mode 100644 index 0000000..d36192a --- /dev/null +++ b/debian/.gitignore @@ -0,0 +1,2 @@ +/aiohttp.egg-info.safe +/files diff --git a/debian/changelog b/debian/changelog index 159e08f..93500df 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,78 @@ +python-aiohttp (3.10.10-2) unstable; urgency=medium + + * Team upload. + * (Build-)depend on python3-yarl (>= 1.12.0) (closes: #1086658). 
+ + -- Colin Watson Wed, 13 Nov 2024 14:31:21 +0000 + +python-aiohttp (3.10.10-1) unstable; urgency=medium + + * New upstream release + + -- Piotr Ożarowski Tue, 29 Oct 2024 12:17:05 +0100 + +python-aiohttp (3.10.8-1) unstable; urgency=medium + + * New upstream release + + -- Piotr Ożarowski Tue, 01 Oct 2024 16:05:56 +0200 + +python-aiohttp (3.10.6-1) unstable; urgency=medium + + * New upstream release + + -- Piotr Ożarowski Fri, 27 Sep 2024 16:27:09 +0200 + +python-aiohttp (3.10.5-1) unstable; urgency=medium + + * New upstream release + + -- Piotr Ożarowski Sat, 24 Aug 2024 09:30:57 +0200 + +python-aiohttp (3.10.4-1) unstable; urgency=medium + + * Team upload. + * New upstream release. + + -- Colin Watson Mon, 19 Aug 2024 11:36:35 +0100 + +python-aiohttp (3.10.3-3) unstable; urgency=medium + + * Team upload. + * Use local Sphinx inventories where available; ignore other missing + references. + + -- Colin Watson Sun, 18 Aug 2024 13:50:33 +0100 + +python-aiohttp (3.10.3-2) unstable; urgency=medium + + * Team upload. + * Upload to unstable. + + -- Colin Watson Thu, 15 Aug 2024 16:45:04 +0100 + +python-aiohttp (3.10.3-1) experimental; urgency=medium + + * Rework build to avoid needing to filter out *.egg-info from upstream. + * Fix documentation build. + * New upstream release. + + -- Colin Watson Wed, 14 Aug 2024 18:19:39 +0100 + +python-aiohttp (3.10.1-1) experimental; urgency=medium + + * Team upload. + * New upstream release. + + -- Colin Watson Mon, 05 Aug 2024 15:14:44 +0100 + +python-aiohttp (3.10.0-1) experimental; urgency=medium + + * Team upload. + * New upstream release. + + -- Colin Watson Sun, 04 Aug 2024 19:05:16 +0100 + python-aiohttp (3.9.5-1) unstable; urgency=medium * Team upload. diff --git a/debian/clean b/debian/clean index 3f0ed21..0133a6c 100644 --- a/debian/clean +++ b/debian/clean @@ -1 +1,2 @@ +debian/aiohttp.egg-info.safe/ docs/_build/ diff --git a/debian/control b/debian/control index a4718d3..d8c3f40 100644 --- a/debian/control +++ b/debian/control @@ -14,12 +14,13 @@ Build-Depends: debhelper-compat (= 13), python3-sphinx, python3-multidict, python3-async-timeout, - python3-yarl, + python3-yarl (>= 1.12.0), cython3, python3-aiosignal, - python3-aiodns, + python3-aiodns (>= 3.2.0), python3-attr, python3-frozenlist, + python3-aiohappyeyeballs, # for tests: python3-pytest, python3-pytest-mock, @@ -27,6 +28,10 @@ Build-Depends: debhelper-compat (= 13), python3-charset-normalizer, python3-gunicorn, # for documentation: + python-pytest-doc, + python-yarl-doc, + python3-alabaster, + python3-doc, python3-sphinxcontrib.blockdiag Rules-Requires-Root: no Standards-Version: 4.7.0 diff --git a/debian/gbp.conf b/debian/gbp.conf index b4fd331..cec628c 100644 --- a/debian/gbp.conf +++ b/debian/gbp.conf @@ -1,3 +1,2 @@ [DEFAULT] pristine-tar = True -filter = [ '*.egg-info' ] diff --git a/debian/patches/0003-remove-forkme-button-from-docs-to-prevent-privacy-br.patch b/debian/patches/0003-remove-forkme-button-from-docs-to-prevent-privacy-br.patch index bf8aa4e..1f920d4 100644 --- a/debian/patches/0003-remove-forkme-button-from-docs-to-prevent-privacy-br.patch +++ b/debian/patches/0003-remove-forkme-button-from-docs-to-prevent-privacy-br.patch @@ -7,7 +7,7 @@ Subject: remove forkme button from docs to prevent privacy breach 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py -index f21366f..2a7aa0c 100644 +index 23ac3e4..217c6ba 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -184,9 +184,9 @@ html_theme_options = { diff --git 
a/debian/patches/0004-remove-sphinxcontrib.towncrier-for-now.patch b/debian/patches/0004-remove-sphinxcontrib.towncrier-for-now.patch index eeaa95b..d0e7921 100644 --- a/debian/patches/0004-remove-sphinxcontrib.towncrier-for-now.patch +++ b/debian/patches/0004-remove-sphinxcontrib.towncrier-for-now.patch @@ -1,13 +1,14 @@ From: =?utf-8?q?Piotr_O=C5=BCarowski?= -Date: Sat, 27 Nov 2021 17:48:20 +0100 -Subject: remove sphinxcontrib.towncrier for now +Date: Wed, 14 Aug 2024 17:49:59 +0100 +Subject: Remove sphinxcontrib.towncrier for now -not packaged in Debian yet +The upstream sdist doesn't ship the `CHANGES/` directory. --- - docs/changes.rst | 10 +--------- - docs/conf.py | 2 +- - docs/index.rst | 2 +- - 3 files changed, 3 insertions(+), 11 deletions(-) + docs/changes.rst | 10 +--------- + docs/conf.py | 2 +- + docs/contributing.rst | 2 +- + docs/index.rst | 2 +- + 4 files changed, 4 insertions(+), 12 deletions(-) diff --git a/docs/changes.rst b/docs/changes.rst index 089f672..9b8cb7e 100644 @@ -28,20 +29,33 @@ index 089f672..9b8cb7e 100644 -.. include:: ../HISTORY.rst +Temporarily removed in Debian, sorry. diff --git a/docs/conf.py b/docs/conf.py -index 2a7aa0c..a1a93d7 100644 +index 217c6ba..20b9fda 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,7 +59,7 @@ extensions = [ "sphinx.ext.viewcode", # Third-party extensions: "sphinxcontrib.blockdiag", -- "sphinxcontrib.towncrier", # provides `towncrier-draft-entries` directive -+ # "sphinxcontrib.towncrier", # provides `towncrier-draft-entries` directive +- "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ++ # "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ] +diff --git a/docs/contributing.rst b/docs/contributing.rst +index 9abd367..b1a6a97 100644 +--- a/docs/contributing.rst ++++ b/docs/contributing.rst +@@ -273,7 +273,7 @@ include a changelog entry. + Changelog update + ---------------- + +-.. include:: ../CHANGES/README.rst ++Temporarily removed in Debian, sorry. + + + Making a pull request diff --git a/docs/index.rst b/docs/index.rst -index 4f55c5d..8bc5bdb 100644 +index 4ce20ac..1518c6c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -124,7 +124,7 @@ will: diff --git a/debian/patches/0005-avoid-aiohttp-theme.patch b/debian/patches/0005-avoid-aiohttp-theme.patch new file mode 100644 index 0000000..7a5471b --- /dev/null +++ b/debian/patches/0005-avoid-aiohttp-theme.patch @@ -0,0 +1,55 @@ +From: Colin Watson +Date: Wed, 14 Aug 2024 18:11:32 +0100 +Subject: Don't use aiohttp_theme + +It isn't currently packaged in Debian. +--- + docs/conf.py | 28 +--------------------------- + 1 file changed, 1 insertion(+), 27 deletions(-) + +diff --git a/docs/conf.py b/docs/conf.py +index 20b9fda..16c00fa 100644 +--- a/docs/conf.py ++++ b/docs/conf.py +@@ -174,7 +174,7 @@ extlinks = { + + # The theme to use for HTML and HTML Help pages. See the documentation for + # a list of builtin themes. +-html_theme = "aiohttp_theme" ++html_theme = "alabaster" + + # Theme options are theme-specific and customize the look and feel of a theme + # further. 
For a list of options available for each theme, see the +@@ -187,32 +187,6 @@ html_theme_options = { + "github_button": False, + "github_type": "star", + "github_banner": False, +- "badges": [ +- { +- "image": f"{github_repo_url}/workflows/CI/badge.svg", +- "target": f"{github_repo_url}/actions?query=workflow%3ACI", +- "height": "20", +- "alt": "Azure Pipelines CI status", +- }, +- { +- "image": f"https://codecov.io/github/{github_repo_slug}/coverage.svg?branch=master", +- "target": f"https://codecov.io/github/{github_repo_slug}", +- "height": "20", +- "alt": "Code coverage status", +- }, +- { +- "image": f"https://badge.fury.io/py/{project}.svg", +- "target": f"https://badge.fury.io/py/{project}", +- "height": "20", +- "alt": "Latest PyPI package version", +- }, +- { +- "image": "https://badges.gitter.im/Join%20Chat.svg", +- "target": f"https://gitter.im/{github_repo_org}/Lobby", +- "height": "20", +- "alt": "Chat on Gitter", +- }, +- ], + } + + html_css_files = [ diff --git a/debian/patches/0006-use-local-documentation.patch b/debian/patches/0006-use-local-documentation.patch new file mode 100644 index 0000000..7cf957a --- /dev/null +++ b/debian/patches/0006-use-local-documentation.patch @@ -0,0 +1,51 @@ +From: Colin Watson +Date: Sun, 18 Aug 2024 12:01:35 +0100 +Subject: Use local Sphinx inventories where available + +--- + docs/conf.py | 32 +++++++++++++++++++------------- + 1 file changed, 19 insertions(+), 13 deletions(-) + +diff --git a/docs/conf.py b/docs/conf.py +index 16c00fa..d9a2cc3 100644 +--- a/docs/conf.py ++++ b/docs/conf.py +@@ -70,19 +70,25 @@ try: + except ImportError: + pass + +- +-intersphinx_mapping = { +- "pytest": ("http://docs.pytest.org/en/latest/", None), +- "python": ("http://docs.python.org/3", None), +- "multidict": ("https://multidict.readthedocs.io/en/stable/", None), +- "yarl": ("https://yarl.readthedocs.io/en/stable/", None), +- "aiosignal": ("https://aiosignal.readthedocs.io/en/stable/", None), +- "aiohttpjinja2": ("https://aiohttp-jinja2.readthedocs.io/en/stable/", None), +- "aiohttpremotes": ("https://aiohttp-remotes.readthedocs.io/en/stable/", None), +- "aiohttpsession": ("https://aiohttp-session.readthedocs.io/en/stable/", None), +- "aiohttpdemos": ("https://aiohttp-demos.readthedocs.io/en/latest/", None), +- "aiojobs": ("https://aiojobs.readthedocs.io/en/stable/", None), +-} ++def check_object_path(key, url, path): ++ if path is not None and os.path.isfile(path): ++ return {key: (url, path)} ++ return {} ++ ++intersphinx_mapping = {} ++for key, url, path in ( ++ ("pytest", "http://docs.pytest.org/en/latest/", "/usr/share/doc/python-pytest-doc/html/objects.inv"), ++ ("python", "http://docs.python.org/3", "/usr/share/doc/python3/html/objects.inv"), ++ ("multidict", "https://multidict.readthedocs.io/en/stable/", None), ++ ("yarl", "https://yarl.readthedocs.io/en/stable/", "/usr/share/doc/python-yarl-doc/html/objects.inv"), ++ ("aiosignal", "https://aiosignal.readthedocs.io/en/stable/", None), ++ ("aiohttpjinja2", "https://aiohttp-jinja2.readthedocs.io/en/stable/", None), ++ ("aiohttpremotes", "https://aiohttp-remotes.readthedocs.io/en/stable/", None), ++ ("aiohttpsession", "https://aiohttp-session.readthedocs.io/en/stable/", None), ++ ("aiohttpdemos", "https://aiohttp-demos.readthedocs.io/en/latest/", None), ++ ("aiojobs", "https://aiojobs.readthedocs.io/en/stable/", None), ++): ++ intersphinx_mapping.update(check_object_path(key, url, path)) + + # Add any paths that contain templates here, relative to this directory. 
+ templates_path = ["_templates"] diff --git a/debian/patches/series b/debian/patches/series index 01e31f6..edcdff9 100644 --- a/debian/patches/series +++ b/debian/patches/series @@ -1,3 +1,5 @@ 0002-Add-shebang-to-examples.patch 0003-remove-forkme-button-from-docs-to-prevent-privacy-br.patch 0004-remove-sphinxcontrib.towncrier-for-now.patch +0005-avoid-aiohttp-theme.patch +0006-use-local-documentation.patch diff --git a/debian/rules b/debian/rules index cb11b50..1b90615 100755 --- a/debian/rules +++ b/debian/rules @@ -6,17 +6,30 @@ export PYBUILD_TEST_PYTEST=1 export PYBUILD_TEST_ARGS={dir}/tests export DEB_BUILD_MAINT_OPTIONS = hardening=+all +# Work around https://bugs.debian.org/1051837 +egg_info := aiohttp.egg-info +egg_info_safe := debian/aiohttp.egg-info.safe +define save-egg-info = +if [ ! -d $(egg_info_safe) ]; then cp -a $(egg_info) $(egg_info_safe); fi +endef +define restore-egg-info = +if [ -d $(egg_info_safe) ]; then rm -rf $(egg_info); cp -a $(egg_info_safe) $(egg_info); fi +endef + %: dh $@ --with python3,sphinxdoc --buildsystem pybuild override_dh_auto_build: - sed -i -e '/sphinxcontrib.asyncio/d' -e '/aiohttp_theme/d' docs/conf.py - ${MAKE} -C docs html || true + $(save-egg-info) + # Ignore missing references. + ${MAKE} -C docs html SPHINXOPTS= dh_auto_build override_dh_auto_clean: rm -rf .cache + $(save-egg-info) dh_auto_clean + $(restore-egg-info) override_dh_strip: dh_strip --dbgsym-migration='python3-aiohttp-dbg (<< 3.7.4-2~)' diff --git a/debian/source/options b/debian/source/options deleted file mode 100644 index b2b7b88..0000000 --- a/debian/source/options +++ /dev/null @@ -1 +0,0 @@ -extend-diff-ignore="^[^/]+\.egg-info/" diff --git a/docs/abc.rst b/docs/abc.rst index d269567..4eea671 100644 --- a/docs/abc.rst +++ b/docs/abc.rst @@ -181,3 +181,57 @@ Abstract Access Logger :param response: :class:`aiohttp.web.Response` object. :param float time: Time taken to serve the request. + + +Abstract Resolver +------------------------------- + +.. class:: AbstractResolver + + An abstract class, base for all resolver implementations. + + Method ``resolve`` should be overridden. + + .. method:: resolve(host, port, family) + + Resolve host name to IP address. + + :param str host: host name to resolve. + + :param int port: port number. + + :param int family: socket family. + + :return: list of :class:`aiohttp.abc.ResolveResult` instances. + + .. method:: close() + + Release resolver. + +.. class:: ResolveResult + + Result of host name resolution. + + .. attribute:: hostname + + The host name that was provided. + + .. attribute:: host + + The IP address that was resolved. + + .. attribute:: port + + The port that was resolved. + + .. attribute:: family + + The address family that was resolved. + + .. attribute:: proto + + The protocol that was resolved. + + .. attribute:: flags + + The flags that were resolved. 
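The ``AbstractResolver`` contract documented above is easiest to see with a small concrete implementation. The following is a minimal sketch and not part of the patch: it assumes the dict-shaped ``ResolveResult`` fields listed in this section (``hostname``, ``host``, ``port``, ``family``, ``proto``, ``flags``), and the ``StaticResolver`` class with its fixed host mapping is hypothetical::

    import socket

    from aiohttp.abc import AbstractResolver


    class StaticResolver(AbstractResolver):
        """Resolve a fixed set of host names without consulting DNS."""

        def __init__(self, mapping):
            # Hypothetical data, e.g. {"internal.example": "10.0.0.5"}.
            self._mapping = mapping

        async def resolve(self, host, port=0, family=socket.AF_INET):
            if host not in self._mapping:
                raise OSError(f"no static entry for {host!r}")
            # One entry per resolved address, shaped like ResolveResult above.
            return [
                {
                    "hostname": host,
                    "host": self._mapping[host],
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            ]

        async def close(self):
            """Nothing to release for a static mapping."""

A resolver of this kind would typically be handed to the connector, for example ``aiohttp.TCPConnector(resolver=StaticResolver({...}))``.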
diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 958e31d..524b087 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -82,14 +82,14 @@ parameter of :class:`ClientSession` constructor:: between multiple requests:: async with aiohttp.ClientSession() as session: - await session.get( - 'http://httpbin.org/cookies/set?my_cookie=my_value') - filtered = session.cookie_jar.filter_cookies( - 'http://httpbin.org') - assert filtered['my_cookie'].value == 'my_value' - async with session.get('http://httpbin.org/cookies') as r: + async with session.get( + "http://httpbin.org/cookies/set?my_cookie=my_value", + allow_redirects=False + ) as resp: + assert resp.cookies["my_cookie"].value == "my_value" + async with session.get("http://httpbin.org/cookies") as r: json_body = await r.json() - assert json_body['cookies']['my_cookie'] == 'my_value' + assert json_body["cookies"]["my_cookie"] == "my_value" Response Headers and Cookies ---------------------------- @@ -618,7 +618,7 @@ Graceful Shutdown ----------------- When :class:`ClientSession` closes at the end of an ``async with`` -block (or through a direct :meth:`ClientSession.close()` call), the +block (or through a direct :meth:`ClientSession.close` call), the underlying connection remains open due to asyncio internal details. In practice, the underlying connection will close after a short while. However, if the event loop is stopped before the underlying @@ -658,7 +658,7 @@ on this. Character Set Detection ----------------------- -If you encounter a :exc:`UnicodeDecodeError` when using :meth:`ClientResponse.text()` +If you encounter a :exc:`UnicodeDecodeError` when using :meth:`ClientResponse.text` this may be because the response does not include the charset needed to decode the body. diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index 51b2ca1..f99339c 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -417,7 +417,8 @@ Timeouts Timeout settings are stored in :class:`ClientTimeout` data structure. By default *aiohttp* uses a *total* 300 seconds (5min) timeout, it means that the -whole operation should finish in 5 minutes. +whole operation should finish in 5 minutes. In order to allow time for DNS fallback, +the default ``sock_connect`` timeout is 30 seconds. The value could be overridden by *timeout* parameter for the session (specified in seconds):: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index fdf66e1..c48e87e 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -41,17 +41,21 @@ The client session supports the context manager protocol for self closing. connector=None, cookies=None, \ headers=None, skip_auto_headers=None, \ auth=None, json_serialize=json.dumps, \ + request_class=ClientRequest, \ + response_class=ClientResponse, \ + ws_response_class=ClientWebSocketResponse, \ version=aiohttp.HttpVersion11, \ - cookie_jar=None, read_timeout=None, \ - conn_timeout=None, \ - timeout=sentinel, \ - raise_for_status=False, \ + cookie_jar=None, \ connector_owner=True, \ + raise_for_status=False, \ + timeout=sentinel, \ auto_decompress=True, \ - read_bufsize=2**16, \ - requote_redirect_url=True, \ trust_env=False, \ + requote_redirect_url=True, \ trace_configs=None, \ + read_bufsize=2**16, \ + max_line_size=8190, \ + max_field_size=8190, \ fallback_charset_resolver=lambda r, b: "utf-8") The class for creating client sessions and making requests. 
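To make the timeout behaviour described in the quickstart and reference updates above concrete, here is a minimal sketch that pins the documented defaults explicitly; the values simply restate the 300 s total / 30 s ``sock_connect`` defaults, and ``http://example.com`` together with the ``fetch`` helper are illustrative only::

    import asyncio

    import aiohttp

    # Spell out the documented defaults: a 5 minute total budget and a
    # 30 second budget for establishing each socket connection, so one
    # slow address does not consume the whole total timeout before the
    # next resolved host is tried.
    TIMEOUT = aiohttp.ClientTimeout(total=300, sock_connect=30)


    async def fetch(url: str) -> str:
        async with aiohttp.ClientSession(timeout=TIMEOUT) as session:
            async with session.get(url) as resp:
                return await resp.text()


    if __name__ == "__main__":
        print(asyncio.run(fetch("http://example.com")))

The other ``ClientTimeout`` fields (``connect``, ``sock_read``) can be overridden in the same way.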
@@ -67,17 +71,6 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. - :param loop: :ref:`event loop` used for - processing HTTP requests. - - If *loop* is ``None`` the constructor - borrows it from *connector* if specified. - - :func:`asyncio.get_event_loop` is used for getting default event - loop otherwise. - - .. deprecated:: 2.0 - :param dict cookies: Cookies to send with the request (optional) :param headers: HTTP Headers to send with every request (optional). @@ -99,7 +92,20 @@ The client session supports the context manager protocol for self closing. Iterable of :class:`str` or :class:`~multidict.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic - Authorization (optional) + Authorization (optional). It will be included + with any request to any origin and will not be + removed, even during redirect to a different + origin. + + :param collections.abc.Callable json_serialize: Json *serializer* callable. + + By default :func:`json.dumps` function. + + :param aiohttp.ClientRequest request_class: Custom class to use for client requests. + + :param ClientResponse response_class: Custom class to use for client responses. + + :param ClientWebSocketResponse ws_response_class: Custom class to use for websocket responses. :param version: supported HTTP version, ``HTTP 1.1`` by default. @@ -116,16 +122,20 @@ The client session supports the context manager protocol for self closing. :class:`aiohttp.DummyCookieJar` instance can be provided. - :param collections.abc.Callable json_serialize: Json *serializer* callable. + :param bool connector_owner: - By default :func:`json.dumps` function. + Close connector instance on session closing. + + Setting the parameter to ``False`` allows sharing the + connection pool between sessions without sharing session state: + cookies etc. :param bool raise_for_status: - Automatically call :meth:`ClientResponse.raise_for_status()` for + Automatically call :meth:`ClientResponse.raise_for_status` for each response, ``False`` by default. - This parameter can be overridden when you making a request, e.g.:: + This parameter can be overridden when making a request, e.g.:: client_session = aiohttp.ClientSession(raise_for_status=True) resp = await client_session.get(url, raise_for_status=False) @@ -154,43 +164,18 @@ The client session supports the context manager protocol for self closing. overwrite it on a per-request basis. :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout by default. + total timeout, 30 seconds socket connect timeout by default. .. versionadded:: 3.3 - :param float read_timeout: Request operations timeout. ``read_timeout`` is - cumulative for all request operations (request, redirects, responses, - data consuming). By default, the read timeout is 5*60 seconds. - Use ``None`` or ``0`` to disable timeout checks. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param float conn_timeout: timeout for connection establishing - (optional). Values ``0`` or ``None`` mean no timeout. + .. versionchanged:: 3.10.9 - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param bool connector_owner: - - Close connector instance on session closing. - - Setting the parameter to ``False`` allows to share - connection pool between sessions without sharing session state: - cookies etc.
+ The default value for the ``sock_connect`` timeout has been changed to 30 seconds. :param bool auto_decompress: Automatically decompress response body (``True`` by default). .. versionadded:: 2.3 - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - 64 KiB by default. - - .. versionadded:: 3.7 - :param bool trust_env: Trust environment settings for proxy configuration if the parameter is ``True`` (``False`` by default). See :ref:`aiohttp-client-proxy-support` for more information. @@ -227,6 +212,15 @@ The client session supports the context manager protocol for self closing. disabling. See :ref:`aiohttp-client-tracing-reference` for more information. + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + 64 KiB by default. + + .. versionadded:: 3.7 + + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :param Callable[[ClientResponse,bytes],str] fallback_charset_resolver: A :term:`callable` that accepts a :class:`ClientResponse` and the :class:`bytes` contents, and returns a :class:`str` which will be used as @@ -333,7 +327,7 @@ The client session supports the context manager protocol for self closing. .. attribute:: raise_for_status - Should :meth:`ClientResponse.raise_for_status()` be called for each response + Should :meth:`ClientResponse.raise_for_status` be called for each response Either :class:`bool` or :class:`collections.abc.Callable` @@ -371,12 +365,15 @@ The client session supports the context manager protocol for self closing. max_redirects=10,\ compress=None, chunked=None, expect100=False, raise_for_status=None,\ read_until_eof=True, \ - read_bufsize=None, \ proxy=None, proxy_auth=None,\ - timeout=sentinel, ssl=None, \ - verify_ssl=None, fingerprint=None, \ - ssl_context=None, proxy_headers=None, \ - server_hostname=None, auto_decompress=None) + timeout=sentinel, ssl=True, \ + server_hostname=None, \ + proxy_headers=None, \ + trace_request_ctx=None, \ + read_bufsize=None, \ + auto_decompress=None, \ + max_line_size=None, \ + max_field_size=None) :async: :noindexentry: @@ -459,7 +456,7 @@ The client session supports the context manager protocol for self closing. :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). - :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status()` for + :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status` for response if set to ``True``. If set to ``None`` value from ``ClientSession`` will be used. ``None`` by default (optional). @@ -470,12 +467,6 @@ The client session supports the context manager protocol for self closing. does not have Content-Length header. ``True`` by default (optional). - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - ``None`` by default, - it means that the session global value is used. - - .. versionadded:: 3.7 - :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP @@ -491,7 +482,7 @@ The client session supports the context manager protocol for self closing. If :class:`float` is passed it is a *total* timeout (in seconds). - :param ssl: SSL validation mode. ``None`` for default SSL check + :param ssl: SSL validation mode. 
``True`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint @@ -503,29 +494,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 - :param bool verify_ssl: Perform SSL certificate validation for - *HTTPS* requests (enabled by default). May be disabled to - skip validation for sites with invalid certificates. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=False`` - - :param bytes fingerprint: Pass the SHA256 digest of the expected - certificate in DER format to verify that the certificate the - server presents matches. Useful for `certificate pinning - `_. - - Warning: use of MD5 or SHA1 digests is insecure and removed. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=aiohttp.Fingerprint(digest)`` - :param str server_hostname: Sets or overrides the host name that the target server’s certificate will be matched against. @@ -533,18 +501,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.9 - :param ssl.SSLContext ssl_context: ssl context used for processing - *HTTPS* requests (optional). - - *ssl_context* may be used for configuring certification - authority channel, supported SSL options etc. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=ssl_context`` - :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. @@ -557,10 +513,20 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + ``None`` by default, + it means that the session global value is used. + + .. versionadded:: 3.7 + :param bool auto_decompress: Automatically decompress response body. Overrides :attr:`ClientSession.auto_decompress`. May be used to enable/disable auto decompression on a per-request basis. + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :return ClientResponse: a :class:`client response ` object. @@ -696,7 +662,7 @@ The client session supports the context manager protocol for self closing. origin=None, \ params=None, \ headers=None, \ - proxy=None, proxy_auth=None, ssl=None, \ + proxy=None, proxy_auth=None, ssl=True, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None, \ compress=0, max_msg_size=4194304) @@ -760,7 +726,7 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP Basic Authorization (optional) - :param ssl: SSL validation mode. ``None`` for default SSL check + :param ssl: SSL validation mode. ``True`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint @@ -785,7 +751,7 @@ The client session supports the context manager protocol for self closing. :param bytes fingerprint: Pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning - `_. + `_. Note: use of MD5 or SHA1 digests is insecure and deprecated. @@ -913,7 +879,7 @@ certification chaining. ``False`` by default (optional). 
:param bool raise_for_status: Automatically call - :meth:`ClientResponse.raise_for_status()` + :meth:`ClientResponse.raise_for_status` for response if set to ``True``. If set to ``None`` value from ``ClientSession`` will be used. @@ -935,7 +901,7 @@ certification chaining. .. versionadded:: 3.7 :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout by default. + total timeout, 30 seconds socket connect timeout by default. :param loop: :ref:`event loop` used for processing HTTP requests. @@ -990,7 +956,7 @@ is controlled by *force_close* constructor's parameter). flag. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are @@ -1035,7 +1001,7 @@ is controlled by *force_close* constructor's parameter). Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. - If *limit_per_host* is ``None`` the connector has no limit per host. + If *limit_per_host* is ``0`` the connector has no limit per host. Read-only property. @@ -1066,12 +1032,13 @@ is controlled by *force_close* constructor's parameter). overridden in subclasses. -.. class:: TCPConnector(*, ssl=None, verify_ssl=True, fingerprint=None, \ +.. class:: TCPConnector(*, ssl=True, verify_ssl=True, fingerprint=None, \ use_dns_cache=True, ttl_dns_cache=10, \ family=0, ssl_context=None, local_addr=None, \ resolver=None, keepalive_timeout=sentinel, \ force_close=False, limit=100, limit_per_host=0, \ - enable_cleanup_closed=False, loop=None) + enable_cleanup_closed=False, timeout_ceil_threshold=5, \ + happy_eyeballs_delay=0.25, interleave=None, loop=None) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. @@ -1083,7 +1050,7 @@ is controlled by *force_close* constructor's parameter). Constructor accepts all parameters suitable for :class:`BaseConnector` plus several TCP-specific ones: - :param ssl: SSL validation mode. ``None`` for default SSL check + :param ssl: SSL validation mode. ``True`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint @@ -1106,7 +1073,7 @@ is controlled by *force_close* constructor's parameter). :param bytes fingerprint: pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning - `_. + `_. Note: use of MD5 or SHA1 digests is insecure and deprecated. @@ -1129,7 +1096,7 @@ is controlled by *force_close* constructor's parameter). updated refreshing each entry after N seconds. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are @@ -1174,6 +1141,24 @@ is controlled by *force_close* constructor's parameter). If this parameter is set to True, aiohttp additionally aborts underlining transport after 2 seconds. It is off by default. + :param float happy_eyeballs_delay: The amount of time in seconds to wait for a + connection attempt to complete, before starting the next attempt in parallel. + This is the “Connection Attempt Delay” as defined in RFC 8305. 
To disable + Happy Eyeballs, set this to ``None``. The default value recommended by the + RFC is 0.25 (250 milliseconds). + + .. versionadded:: 3.10 + + :param int interleave: controls address reordering when a host name resolves + to multiple IP addresses. If ``0`` or unspecified, no reordering is done, and + addresses are tried in the order returned by the resolver. If a positive + integer is specified, the addresses are interleaved by address family, and + the given integer is interpreted as “First Address Family Count” as defined + in RFC 8305. The default is ``0`` if happy_eyeballs_delay is not specified, and + ``1`` if it is. + + .. versionadded:: 3.10 + .. attribute:: family *TCP* socket family e.g. :data:`socket.AF_INET` or @@ -1780,6 +1765,26 @@ Utilities .. versionadded:: 3.8 +.. class:: ContentDisposition + + A data class to represent the Content-Disposition header, + available as :attr:`ClientResponse.content_disposition` attribute. + + .. attribute:: type + + A :class:`str` instance. Value of Content-Disposition header + itself, e.g. ``attachment``. + + .. attribute:: filename + + A :class:`str` instance. Content filename extracted from + parameters. May be ``None``. + + .. attribute:: parameters + + Read-only mapping contains all parameters. + + .. class:: RequestInfo() A data class with request URL and headers from :class:`~aiohttp.ClientRequest` @@ -2096,23 +2101,39 @@ All exceptions are available as members of *aiohttp* module. Invalid URL, :class:`yarl.URL` instance. -.. class:: ContentDisposition + .. attribute:: description - Represent Content-Disposition header + Invalid URL description, :class:`str` instance or :data:`None`. - .. attribute:: type +.. exception:: InvalidUrlClientError - A :class:`str` instance. Value of Content-Disposition header - itself, e.g. ``attachment``. + Base class for all errors related to client url. - .. attribute:: filename + Derived from :exc:`InvalidURL` - A :class:`str` instance. Content filename extracted from - parameters. May be ``None``. +.. exception:: RedirectClientError - .. attribute:: parameters + Base class for all errors related to client redirects. - Read-only mapping contains all parameters. + Derived from :exc:`ClientError` + +.. exception:: NonHttpUrlClientError + + Base class for all errors related to non http client urls. + + Derived from :exc:`ClientError` + +.. exception:: InvalidUrlRedirectClientError + + Redirect URL is malformed, e.g. it does not contain host part. + + Derived from :exc:`InvalidUrlClientError` and :exc:`RedirectClientError` + +.. exception:: NonHttpUrlRedirectClientError + + Redirect URL does not contain http schema. + + Derived from :exc:`RedirectClientError` and :exc:`NonHttpUrlClientError` Response errors ^^^^^^^^^^^^^^^ @@ -2190,6 +2211,10 @@ Connection errors Derived from :exc:`ClientError` +.. class:: ClientConnectionResetError + + Derived from :exc:`ClientConnectionError` and :exc:`ConnectionResetError` + .. class:: ClientOSError Subset of connection errors that are initiated by an :exc:`OSError` @@ -2203,6 +2228,12 @@ Connection errors Derived from :exc:`ClientOSError` +.. class:: ClientConnectorDNSError + + DNS resolution error. + + Derived from :exc:`ClientConnectorError` + .. class:: ClientProxyConnectionError Derived from :exc:`ClientConnectorError` @@ -2252,8 +2283,22 @@ Connection errors Server operation timeout: read timeout, etc. + To catch all timeouts, including the ``total`` timeout, use + :exc:`asyncio.TimeoutError`. 
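Because :exc:`ConnectionTimeoutError` and :exc:`SocketTimeoutError` derive from :exc:`ServerTimeoutError`, which in turn derives from :exc:`asyncio.TimeoutError`, the specific classes have to be caught before the generic one. A rough sketch of client code distinguishing the cases (timeout values are illustrative only)::

    import asyncio
    from typing import Optional

    import aiohttp

    async def fetch(url: str) -> Optional[str]:
        timeout = aiohttp.ClientTimeout(total=60, sock_connect=5, sock_read=10)
        try:
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.get(url) as resp:
                    return await resp.text()
        except aiohttp.ConnectionTimeoutError:
            # connect/sock_connect budget exceeded
            return None
        except aiohttp.SocketTimeoutError:
            # peer stopped sending data within the sock_read budget
            return None
        except asyncio.TimeoutError:
            # any other timeout, e.g. the overall ``total`` timeout
            return None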
+ Derived from :exc:`ServerConnectionError` and :exc:`asyncio.TimeoutError` +.. class:: ConnectionTimeoutError + + Connection timeout on ``connect`` and ``sock_connect`` timeouts. + + Derived from :exc:`ServerTimeoutError` + +.. class:: SocketTimeoutError + + Reading from socket timeout on ``sock_read`` timeout. + + Derived from :exc:`ServerTimeoutError` Hierarchy of exceptions ^^^^^^^^^^^^^^^^^^^^^^^ @@ -2262,12 +2307,16 @@ Hierarchy of exceptions * :exc:`ClientConnectionError` + * :exc:`ClientConnectionResetError` + * :exc:`ClientOSError` * :exc:`ClientConnectorError` * :exc:`ClientProxyConnectionError` + * :exc:`ClientConnectorDNSError` + * :exc:`ClientSSLError` * :exc:`ClientConnectorCertificateError` @@ -2284,6 +2333,10 @@ Hierarchy of exceptions * :exc:`ServerTimeoutError` + * :exc:`ConnectionTimeoutError` + + * :exc:`SocketTimeoutError` + * :exc:`ClientPayloadError` * :exc:`ClientResponseError` @@ -2297,3 +2350,17 @@ Hierarchy of exceptions * :exc:`WSServerHandshakeError` * :exc:`InvalidURL` + + * :exc:`InvalidUrlClientError` + + * :exc:`InvalidUrlRedirectClientError` + + * :exc:`NonHttpUrlClientError` + + * :exc:`NonHttpUrlRedirectClientError` + + * :exc:`RedirectClientError` + + * :exc:`InvalidUrlRedirectClientError` + + * :exc:`NonHttpUrlRedirectClientError` diff --git a/docs/conf.py b/docs/conf.py index f21366f..23ac3e4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,7 +59,7 @@ "sphinx.ext.viewcode", # Third-party extensions: "sphinxcontrib.blockdiag", - "sphinxcontrib.towncrier", # provides `towncrier-draft-entries` directive + "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ] @@ -394,7 +394,8 @@ ("py:class", "aiohttp.protocol.HttpVersion"), # undocumented ("py:class", "aiohttp.ClientRequest"), # undocumented ("py:class", "aiohttp.payload.Payload"), # undocumented - ("py:class", "aiohttp.abc.AbstractResolver"), # undocumented + ("py:class", "aiohttp.resolver.AsyncResolver"), # undocumented + ("py:class", "aiohttp.resolver.ThreadedResolver"), # undocumented ("py:func", "aiohttp.ws_connect"), # undocumented ("py:meth", "start"), # undocumented ("py:exc", "aiohttp.ClientHttpProxyError"), # undocumented diff --git a/docs/contributing-admins.rst b/docs/contributing-admins.rst index 9444f8a..acfaebc 100644 --- a/docs/contributing-admins.rst +++ b/docs/contributing-admins.rst @@ -52,6 +52,6 @@ Back on the original release branch, bump the version number and append ``.dev0` If doing a minor release: #. Create a new release branch for future features to go to: e.g. ``git checkout -b 3.10 3.9 && git push`` -#. Update ``target-branch`` for Dependabot to reference the new branch name in ``.github/dependabot.yml``. +#. Update both ``target-branch`` backports for Dependabot to reference the new branch name in ``.github/dependabot.yml``. #. Delete the older backport label (e.g. backport-3.8): https://github.com/aio-libs/aiohttp/labels #. Add a new backport label (e.g. backport-3.10). diff --git a/docs/faq.rst b/docs/faq.rst index 2de70f9..30803da 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -279,7 +279,18 @@ A subapplication is an isolated unit by design. 
If you need to share a database object, do it explicitly:: subapp[db_key] = mainapp[db_key] - mainapp.add_subapp('/prefix', subapp) + mainapp.add_subapp("/prefix", subapp) + +This can also be done from a :ref:`cleanup context`:: + + async def db_context(app: web.Application) -> AsyncIterator[None]: + async with create_db() as db: + mainapp[db_key] = mainapp[subapp_key][db_key] = db + yield + + mainapp[subapp_key] = subapp + mainapp.add_subapp("/prefix", subapp) + mainapp.cleanup_ctx.append(db_context) How do I perform operations in a request handler after sending the response? diff --git a/docs/index.rst b/docs/index.rst index 4f55c5d..4ce20ac 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -147,7 +147,6 @@ or have some suggestion in order to improve the library. Dependencies ============ -- *async_timeout* - *attrs* - *multidict* - *yarl* @@ -174,7 +173,7 @@ Communication channels Feel free to post your questions and ideas here. -*gitter chat* https://gitter.im/aio-libs/Lobby +*Matrix*: `#aio-libs:matrix.org `_ We support `Stack Overflow `_. diff --git a/docs/streams.rst b/docs/streams.rst index 10eec6d..8e4be9d 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -26,13 +26,17 @@ Reading Methods .. method:: StreamReader.read(n=-1) :async: - Read up to *n* bytes. If *n* is not provided, or set to ``-1``, read until - EOF and return all read bytes. + Read up to a maximum of *n* bytes. If *n* is not provided, or set to ``-1``, + read until EOF and return all read bytes. + + When *n* is provided, data will be returned as soon as it is available. + Therefore it will return less than *n* bytes if there are less than *n* + bytes in the buffer. If the EOF was received and the internal buffer is empty, return an empty bytes object. - :param int n: how many bytes to read, ``-1`` for the whole stream. + :param int n: maximum number of bytes to read, ``-1`` for the whole stream. :return bytes: the given data @@ -127,6 +131,14 @@ size limit and over any available data. async for data in response.content.iter_chunked(1024): print(data) + To get chunks that are exactly *n* bytes, you could use the + `asyncstdlib.itertools `_ + module:: + + chunks = batched(chain.from_iterable(response.content.iter_chunked(n)), n) + async for data in chunks: + print(data) + .. method:: StreamReader.iter_any() :async: @@ -170,7 +182,7 @@ Helpers .. seealso:: - :meth:`StreamReader.at_eof()` + :meth:`StreamReader.at_eof` .. method:: StreamReader.at_eof() @@ -196,7 +208,7 @@ Helpers .. warning:: The method does not wake up waiters. - E.g. :meth:`~StreamReader.read()` will not be resumed. + E.g. :meth:`~StreamReader.read` will not be resumed. .. method:: wait_eof() diff --git a/docs/testing.rst b/docs/testing.rst index 027ba63..a7b93e7 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -32,20 +32,6 @@ insert ``pytest_plugins = 'aiohttp.pytest_plugin'`` line into -Provisional Status -~~~~~~~~~~~~~~~~~~ - -The module is a **provisional**. - -*aiohttp* has a year and half period for removing deprecated API -(:ref:`aiohttp-backward-compatibility-policy`). - -But for :mod:`aiohttp.test_tools` the deprecation period could be reduced. - -Moreover we may break *backward compatibility* without *deprecation -period* for some very strong reason. - - The Test Client and Servers ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -76,14 +62,19 @@ Pytest The :data:`aiohttp_client` fixture available from pytest-aiohttp_ plugin allows you to create a client to make requests to test your app. 
-A simple would be:: +To run these examples, you need to use `--asyncio-mode=auto` or add to your +pytest config file:: + + asyncio_mode = auto + +A simple test would be:: from aiohttp import web async def hello(request): return web.Response(text='Hello, world') - async def test_hello(aiohttp_client, loop): + async def test_hello(aiohttp_client): app = web.Application() app.router.add_get('/', hello) client = await aiohttp_client(app) @@ -111,11 +102,11 @@ app test client:: body='value: {}'.format(request.app[value]).encode('utf-8')) @pytest.fixture - def cli(loop, aiohttp_client): + async def cli(aiohttp_client): app = web.Application() app.router.add_get('/', previous) app.router.add_post('/', previous) - return loop.run_until_complete(aiohttp_client(app)) + return await aiohttp_client(app) async def test_set_value(cli): resp = await cli.post('/', data={'value': 'foo'}) @@ -458,14 +449,12 @@ Framework Agnostic Utilities High level test creation:: - from aiohttp.test_utils import TestClient, TestServer, loop_context + from aiohttp.test_utils import TestClient, TestServer from aiohttp import request - # loop_context is provided as a utility. You can use any - # asyncio.BaseEventLoop class in its place. - with loop_context() as loop: + async def test(): app = _create_example_app() - with TestClient(TestServer(app), loop=loop) as client: + async with TestClient(TestServer(app)) as client: async def test_get_route(): nonlocal client @@ -474,7 +463,7 @@ High level test creation:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) + await test_get_route() If it's preferred to handle the creation / teardown on a more granular @@ -482,10 +471,10 @@ basis, the TestClient object can be used directly:: from aiohttp.test_utils import TestClient, TestServer - with loop_context() as loop: + async def test(): app = _create_example_app() - client = TestClient(TestServer(app), loop=loop) - loop.run_until_complete(client.start_server()) + client = TestClient(TestServer(app)) + await client.start_server() root = "http://127.0.0.1:{}".format(port) async def test_get_route(): @@ -494,8 +483,8 @@ basis, the TestClient object can be used directly:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) - loop.run_until_complete(client.close()) + await test_get_route() + await client.close() A full list of the utilities provided can be found at the diff --git a/docs/third_party.rst b/docs/third_party.rst index 5c354f1..e8095c7 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -30,6 +30,10 @@ and located on https://github.com/aio-libs aiohttp extensions ^^^^^^^^^^^^^^^^^^ +- `aiohttp-apischema `_ + provides automatic API schema generation and validation of user input + for :mod:`aiohttp.web`. + - `aiohttp-session `_ provides sessions for :mod:`aiohttp.web`. @@ -295,3 +299,9 @@ ask to raise the status. - `rsocket `_ Python implementation of `RSocket protocol `_. + +- `nacl_middleware `_ + An aiohttp middleware library for asymmetric encryption of data transmitted via http and/or websocket connections. + +- `aiohttp-asgi-connector `_ + An aiohttp connector for using a ``ClientSession`` to interface directly with separate ASGI applications. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index d2ba301..070bae3 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -48,6 +48,8 @@ socket closing on the peer side without reading the full server response. except OSError: # disconnected +.. 
_web-handler-cancellation: + Web handler cancellation ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -68,38 +70,48 @@ needed to deal with them. .. warning:: - :term:`web-handler` execution could be canceled on every ``await`` - if client drops connection without reading entire response's BODY. + :term:`web-handler` execution could be canceled on every ``await`` or + ``async with`` if client drops connection without reading entire response's BODY. Sometimes it is a desirable behavior: on processing ``GET`` request the code might fetch data from a database or other web resource, the fetching is potentially slow. -Canceling this fetch is a good idea: the peer dropped connection +Canceling this fetch is a good idea: the client dropped the connection already, so there is no reason to waste time and resources (memory etc) -by getting data from a DB without any chance to send it back to peer. +by getting data from a DB without any chance to send it back to the client. -But sometimes the cancellation is bad: on ``POST`` request very often -it is needed to save data to a DB regardless of peer closing. +But sometimes the cancellation is bad: on ``POST`` requests very often +it is needed to save data to a DB regardless of connection closing. Cancellation prevention could be implemented in several ways: -* Applying :func:`asyncio.shield` to a coroutine that saves data. -* Using aiojobs_ or another third party library. +* Applying :func:`aiojobs.aiohttp.shield` to a coroutine that saves data. +* Using aiojobs_ or another third party library to run a task in the background. + +:func:`aiojobs.aiohttp.shield` can work well. The only disadvantage is you +need to split the web handler into two async functions: one for the handler +itself and another for protected code. + +.. warning:: -:func:`asyncio.shield` can work well. The only disadvantage is you -need to split web handler into exactly two async functions: one -for handler itself and other for protected code. + We don't recommend using :func:`asyncio.shield` for this because the shielded + task cannot be tracked by the application and therefore there is a risk that + the task will get cancelled during application shutdown. The function provided + by aiojobs_ operates in the same way except the inner task will be tracked + by the Scheduler and will get waited on during the cleanup phase. For example the following snippet is not safe:: + from aiojobs.aiohttp import shield + async def handler(request): - await asyncio.shield(write_to_redis(request)) - await asyncio.shield(write_to_postgres(request)) + await shield(request, write_to_redis(request)) + await shield(request, write_to_postgres(request)) return web.Response(text="OK") -Cancellation might occur while saving data in REDIS, so -``write_to_postgres`` will not be called, potentially +Cancellation might occur while saving data in REDIS, so the +``write_to_postgres`` function will not be called, potentially leaving your data in an inconsistent state. 
Instead, you would need to write something like:: @@ -109,7 +121,7 @@ Instead, you would need to write something like:: await write_to_postgres(request) async def handler(request): - await asyncio.shield(write_data(request)) + await shield(request, write_data(request)) return web.Response(text="OK") Alternatively, if you want to spawn a task without waiting for @@ -160,7 +172,7 @@ restoring the default disconnection behavior only for specific handlers:: app.router.add_post("/", handler) It prevents all of the ``handler`` async function from cancellation, -so ``write_to_db`` will be never interrupted. +so ``write_to_db`` will never be interrupted. .. _aiojobs: http://aiojobs.readthedocs.io/en/latest/ @@ -936,30 +948,24 @@ always satisfactory. When aiohttp is run with :func:`run_app`, it will attempt a graceful shutdown by following these steps (if using a :ref:`runner `, then calling :meth:`AppRunner.cleanup` will perform these steps, excluding -steps 4 and 7). +step 7). 1. Stop each site listening on sockets, so new connections will be rejected. 2. Close idle keep-alive connections (and set active ones to close upon completion). 3. Call the :attr:`Application.on_shutdown` signal. This should be used to shutdown long-lived connections, such as websockets (see below). -4. Wait a short time for running tasks to complete. This allows any pending handlers - or background tasks to complete successfully. The timeout can be adjusted with - ``shutdown_timeout`` in :func:`run_app`. +4. Wait a short time for running handlers to complete. This allows any pending handlers + to complete successfully. The timeout can be adjusted with ``shutdown_timeout`` + in :func:`run_app`. 5. Close any remaining connections and cancel their handlers. It will wait on the canceling handlers for a short time, again adjustable with ``shutdown_timeout``. 6. Call the :attr:`Application.on_cleanup` signal. This should be used to cleanup any resources (such as DB connections). This includes completing the - :ref:`cleanup contexts`. + :ref:`cleanup contexts` which may be used to ensure + background tasks are completed successfully (see + :ref:`handler cancellation` or aiojobs_ for examples). 7. Cancel any remaining tasks and wait on them to complete. -.. note:: - - When creating new tasks in a handler which _should_ be cancelled on server shutdown, - then it is important to keep track of those tasks and explicitly cancel them in a - :attr:`Application.on_shutdown` callback. As we can see from the above steps, - without this the server will wait on those new tasks to complete before it continues - with server shutdown. 
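Pulling the cancellation-protection advice above together, a hedged sketch of a ``POST`` handler protected with aiojobs could look like this; it assumes the aiojobs scheduler is attached to the application with :func:`aiojobs.aiohttp.setup`, and ``write_to_db`` is a placeholder coroutine::

    from aiohttp import web
    from aiojobs.aiohttp import setup, shield

    async def write_to_db(request: web.Request) -> None:
        ...  # hypothetical persistence coroutine

    async def handler(request: web.Request) -> web.Response:
        # The shielded coroutine is tracked by the aiojobs scheduler, so it is
        # awaited during cleanup instead of being cancelled with the handler.
        await shield(request, write_to_db(request))
        return web.Response(text="OK")

    app = web.Application()
    setup(app)  # attach the aiojobs scheduler so shield() can find it
    app.router.add_post("/", handler)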
- Websocket shutdown ^^^^^^^^^^^^^^^^^^ @@ -1058,13 +1064,10 @@ below:: async with client.pubsub() as pubsub: await pubsub.subscribe(channel) while True: - try: - msg = await pubsub.get_message(ignore_subscribe_messages=True) - if msg is not None: - for ws in app["websockets"]: - await ws.send_str("{}: {}".format(channel, msg)) - except asyncio.CancelledError: - break + msg = await pubsub.get_message(ignore_subscribe_messages=True) + if msg is not None: + for ws in app["websockets"]: + await ws.send_str("{}: {}".format(channel, msg)) async def background_tasks(app): @@ -1073,7 +1076,8 @@ below:: yield app[redis_listener].cancel() - await app[redis_listener] + with contextlib.suppress(asyncio.CancelledError): + await app[redis_listener] app = web.Application() diff --git a/docs/web_quickstart.rst b/docs/web_quickstart.rst index c36a995..fcd2b68 100644 --- a/docs/web_quickstart.rst +++ b/docs/web_quickstart.rst @@ -441,8 +441,11 @@ third-party library, :mod:`aiohttp_session`, that adds *session* support:: async def handler(request): session = await get_session(request) - last_visit = session['last_visit'] if 'last_visit' in session else None - text = 'Last visited: {}'.format(last_visit) + + last_visit = session.get("last_visit") + session["last_visit"] = time.time() + text = "Last visited: {}".format(last_visit) + return web.Response(text=text) async def make_app(): diff --git a/docs/web_reference.rst b/docs/web_reference.rst index aedac0e..06c1c03 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -510,7 +510,6 @@ and :ref:`aiohttp-web-signals` handlers. required work will be processed by :mod:`aiohttp.web` internal machinery. - .. class:: Request A request used for receiving request's information by *web handler*. @@ -925,6 +924,31 @@ and :ref:`aiohttp-web-signals` handlers:: :attr:`~aiohttp.StreamResponse.body`, represented as :class:`str`. +.. class:: FileResponse(*, path, chunk_size=256*1024, status=200, reason=None, headers=None) + + The response class used to send files, inherited from :class:`StreamResponse`. + + Supports the ``Content-Range`` and ``If-Range`` HTTP Headers in requests. + + The actual :attr:`body` sending happens in overridden :meth:`~StreamResponse.prepare`. + + :param path: Path to file. Accepts both :class:`str` and :class:`pathlib.Path`. + :param int chunk_size: Chunk size in bytes which will be passed into + :meth:`io.RawIOBase.read` in the event that the + ``sendfile`` system call is not supported. + + :param int status: HTTP status code, ``200`` by default. + + :param str reason: HTTP reason. If param is ``None`` reason will be + calculated based on the *status* + parameter. Otherwise pass :class:`str` with + arbitrary *status* explanation. + + :param collections.abc.Mapping headers: HTTP headers that should be added to + the response's headers. The ``Content-Type`` response header + will be overridden if provided. + + .. class:: WebSocketResponse(*, timeout=10.0, receive_timeout=None, \ autoclose=True, autoping=True, heartbeat=None, \ protocols=(), compress=True, max_msg_size=4194304) @@ -938,8 +962,8 @@ and :ref:`aiohttp-web-signals` handlers:: :meth:`receive` and others. To enable back-pressure from slow websocket clients treat methods - :meth:`ping()`, :meth:`pong()`, :meth:`send_str()`, - :meth:`send_bytes()`, :meth:`send_json()` as coroutines. By + :meth:`ping`, :meth:`pong`, :meth:`send_str`, + :meth:`send_bytes`, :meth:`send_json` as coroutines. By default write buffer size is set to 64k.
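The key point for back-pressure is that the send methods are coroutines and should be awaited. A minimal echo handler illustrating this (a sketch, not taken from the aiohttp docs)::

    from aiohttp import WSMsgType, web

    async def ws_handler(request: web.Request) -> web.WebSocketResponse:
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        async for msg in ws:
            if msg.type == WSMsgType.TEXT:
                # Awaiting send_str() provides the back-pressure: the handler
                # resumes only after the write buffer has drained.
                await ws.send_str(msg.data.upper())
        return ws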
:param bool autoping: Automatically send @@ -1625,7 +1649,7 @@ Application and Router :async: A :ref:`coroutine` that should be called on - server stopping but before :meth:`cleanup()`. + server stopping but before :meth:`cleanup`. The purpose of the method is calling :attr:`on_shutdown` signal handlers. @@ -1846,14 +1870,18 @@ Application and Router system call even if the platform supports it. This can be accomplished by setting environment variable ``AIOHTTP_NOSENDFILE=1``. - If a gzip version of the static content exists at file path + ``.gz``, it - will be used for the response. + If a Brotli or gzip compressed version of the static content exists at + the requested path with the ``.br`` or ``.gz`` extension, it will be used + for the response. Brotli will be preferred over gzip if both files exist. .. warning:: Use :meth:`add_static` for development only. In production, static content should be processed by web servers like *nginx* - or *apache*. + or *apache*. Such web servers will be able to provide significantly + better performance and security for static assets. Several past security + vulnerabilities in aiohttp only affected applications using + :meth:`add_static`. :param str prefix: URL path prefix for handled static files @@ -1972,20 +2000,38 @@ unique *name* and at least one :term:`route`. :term:`web-handler` lookup is performed in the following way: -1. Router iterates over *resources* one-by-one. -2. If *resource* matches to requested URL the resource iterates over - own *routes*. -3. If route matches to requested HTTP method (or ``'*'`` wildcard) the - route's handler is used as found :term:`web-handler`. The lookup is - finished. -4. Otherwise router tries next resource from the *routing table*. -5. If the end of *routing table* is reached and no *resource* / - *route* pair found the *router* returns special :class:`~aiohttp.abc.AbstractMatchInfo` +1. The router splits the URL and checks the index from longest to shortest. + For example, '/one/two/three' will first check the index for + '/one/two/three', then '/one/two' and finally '/'. +2. If the URL part is found in the index, the list of routes for + that URL part is iterated over. If a route matches the requested HTTP + method (or ``'*'`` wildcard) the route's handler is used as the chosen + :term:`web-handler`. The lookup is finished. +3. If the route is not found in the index, the router tries to find + the route in the list of :class:`~aiohttp.web.MatchedSubAppResource`, + (currently only created from :meth:`~aiohttp.web.Application.add_domain`), + and will iterate over the list of + :class:`~aiohttp.web.MatchedSubAppResource` in a linear fashion + until a match is found. +4. If no *resource* / *route* pair was found, the *router* + returns the special :class:`~aiohttp.abc.AbstractMatchInfo` instance with :attr:`aiohttp.abc.AbstractMatchInfo.http_exception` is not ``None`` but :exc:`HTTPException` with either *HTTP 404 Not Found* or *HTTP 405 Method Not Allowed* status code. Registered :meth:`~aiohttp.abc.AbstractMatchInfo.handler` raises this exception on call. +Fixed paths are preferred over variable paths. For example, +if you have two routes ``/a/b`` and ``/a/{name}``, then the first +route will always be preferred over the second one. + +If there are multiple dynamic paths with the same fixed prefix, +they will be resolved in order of registration.
+ +For example, if you have two dynamic routes that are prefixed +with the fixed ``/users`` path such as ``/users/{x}/{y}/z`` and +``/users/{x}/y/z``, the first one will be preferred over the +second one. + User should never instantiate resource classes but give it by :meth:`UrlDispatcher.add_resource` call. @@ -2007,7 +2053,10 @@ Resource classes hierarchy:: Resource PlainResource DynamicResource + PrefixResource StaticResource + PrefixedSubAppResource + MatchedSubAppResource .. class:: AbstractResource @@ -2688,7 +2737,8 @@ application on specific TCP or Unix socket, e.g.:: :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by - default). + default). These handlers will raise + :exc:`GracefulExit`. :param kwargs: named parameters to pass into web protocol. @@ -2761,7 +2811,8 @@ application on specific TCP or Unix socket, e.g.:: :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by - default). + default). These handlers will raise + :exc:`GracefulExit`. :param kwargs: named parameters to pass into web protocol. @@ -2892,6 +2943,16 @@ application on specific TCP or Unix socket, e.g.:: ``128`` by default. +.. exception:: GracefulExit + + Raised by signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` + defined in :class:`AppRunner` and :class:`ServerRunner` + when ``handle_signals`` is set to ``True``. + + Inherited from :exc:`SystemExit`, + which exits with error code ``1`` if not handled. + + Utilities --------- diff --git a/examples/fake_server.py b/examples/fake_server.py index 3157bab..2cfe3ed 100755 --- a/examples/fake_server.py +++ b/examples/fake_server.py @@ -3,10 +3,11 @@ import pathlib import socket import ssl +from typing import List import aiohttp from aiohttp import web -from aiohttp.abc import AbstractResolver +from aiohttp.abc import AbstractResolver, ResolveResult from aiohttp.resolver import DefaultResolver from aiohttp.test_utils import unused_port @@ -19,7 +20,12 @@ def __init__(self, fakes, *, loop): self._fakes = fakes self._resolver = DefaultResolver(loop=loop) - async def resolve(self, host, port=0, family=socket.AF_INET): + async def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ) -> List[ResolveResult]: fake_port = self._fakes.get(host) if fake_port is not None: return [ diff --git a/pyproject.toml b/pyproject.toml index 85d7c87..3396268 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,3 +82,8 @@ skip = "pp*" [tool.codespell] skip = '.git,*.pdf,*.svg,Makefile,CONTRIBUTORS.txt,venvs,_build' ignore-words-list = 'te' + +[tool.slotscheck] +# TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 +# is available in all supported cpython versions +exclude-modules = "(^aiohttp\\.helpers)" diff --git a/requirements/.hash/cython.txt.hash b/requirements/.hash/cython.txt.hash index dc98ead..d635446 100644 --- a/requirements/.hash/cython.txt.hash +++ b/requirements/.hash/cython.txt.hash @@ -1 +1 @@ -568bc64a6a44121726592a8966a04919f5781004cca7efb8d7afc1135ebf54fd /home/runner/work/aiohttp/aiohttp/requirements/cython.txt +7b50f4e32516f7a808dbe40b1c88ab367699d62151edae4eb989010c35da30e4 /home/runner/work/aiohttp/aiohttp/requirements/cython.txt diff --git a/requirements/base.in b/requirements/base.in index df67f78..70493b6 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -1,4 +1,3 @@ --r typing-extensions.in -r runtime-deps.in gunicorn diff 
--git a/requirements/base.txt b/requirements/base.txt index 77943e4..e089d2b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,27 +4,29 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" + # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.15.1 +cffi==1.17.0 # via pycares -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl @@ -34,9 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.7.1 - # via -r requirements/typing-extensions.in -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.3 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index adba72b..30356d0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,9 +4,13 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" + # via + # -r requirements/lint.in + # -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in aioredis==2.0.1 # via -r requirements/lint.in @@ -14,13 +18,13 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.12 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in babel==2.9.1 # via sphinx @@ -30,11 +34,11 @@ blockdiag==2.0.1 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==0.9.0 +build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.15.0 +cffi==1.17.0 # via # cryptography # pycares @@ -52,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.3.2 +coverage==7.6.0 # via # -r requirements/test.in # pytest-cov @@ -60,7 +64,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.5 +cython==3.0.10 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv @@ -70,9 +74,9 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv -freezegun==1.3.0 +freezegun==1.5.1 # via -r requirements/test.in -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal @@ -80,7 +84,7 @@ funcparserlib==1.0.1 # via blockdiag gidgethub==5.0.1 # via cherry-picker 
-gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in identify==2.3.5 # via pre-commit @@ -91,6 +95,12 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx +importlib-metadata==7.0.0 + # via + # build + # sphinx +importlib-resources==6.1.1 + # via towncrier incremental==22.10.0 # via towncrier iniconfig==1.1.1 @@ -101,12 +111,12 @@ jinja2==3.0.3 # towncrier markupsafe==2.0.1 # via jinja2 -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in @@ -120,29 +130,27 @@ packaging==21.2 # gunicorn # pytest # sphinx -pep517==0.12.0 - # via build pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.3.0 +pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv -pluggy==1.0.0 +pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4rc4 +proxy-py==2.4.4 # via -r requirements/test.in pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -pydantic==2.2.0 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.23.4 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling @@ -154,25 +162,33 @@ pyjwt==2.3.0 # pyjwt pyparsing==2.4.7 # via packaging -pytest==7.4.3 +pyproject-hooks==1.0.0 + # via + # build + # pip-tools +pytest==8.3.2 # via # -r requirements/lint.in # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.67.0 - # via -r requirements/test.in +python-on-whales==0.72.0 + # via + # -r requirements/lint.in + # -r requirements/test.in +pytz==2023.3.post1 + # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2021.11.10 +regex==2024.9.11 # via re-assert requests==2.31.0 # via @@ -185,7 +201,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.17.1 +slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx @@ -219,8 +235,8 @@ tomli==2.0.1 # cherry-picker # coverage # mypy - # pep517 # pip-tools + # pyproject-hooks # pytest # slotscheck # towncrier @@ -234,9 +250,8 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via - # -r requirements/typing-extensions.in # aioredis # annotated-types # mypy @@ -247,7 +262,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==1.26.7 # via requests -uvloop==0.19.0 ; platform_system != "Windows" +uvloop==0.21.0b1 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in @@ -259,8 +274,12 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.9.3 +yarl==1.13.0 # via -r requirements/runtime-deps.in +zipp==3.17.0 + # via + # importlib-metadata + # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/cython.in b/requirements/cython.in index ee07533..6f0238f 100644 --- a/requirements/cython.in +++ b/requirements/cython.in @@ -1,4 +1,3 @@ -r multidict.in --r typing-extensions.in # required for parsing aiohttp/hdrs.py by tools/gen.py Cython diff --git a/requirements/cython.txt b/requirements/cython.txt index 
5851f1d..053c390 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.5 +cython==3.0.10 # via -r requirements/cython.in -multidict==6.0.4 +multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 3d5926c..8aeea4e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,9 +4,13 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" + # via + # -r requirements/lint.in + # -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in aioredis==2.0.1 # via -r requirements/lint.in @@ -14,13 +18,13 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.13 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in babel==2.12.1 # via sphinx @@ -28,11 +32,11 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==0.10.0 +build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.17.0 # via # cryptography # pycares @@ -50,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.3.2 +coverage==7.6.0 # via # -r requirements/test.in # pytest-cov @@ -66,9 +70,9 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv -freezegun==1.3.0 +freezegun==1.5.1 # via -r requirements/test.in -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal @@ -76,7 +80,7 @@ funcparserlib==1.0.1 # via blockdiag gidgethub==5.3.0 # via cherry-picker -gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in identify==2.5.26 # via pre-commit @@ -87,6 +91,12 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx +importlib-metadata==7.0.0 + # via + # build + # sphinx +importlib-resources==6.1.1 + # via towncrier incremental==22.10.0 # via towncrier iniconfig==2.0.0 @@ -97,11 +107,11 @@ jinja2==3.1.2 # towncrier markupsafe==2.1.3 # via jinja2 -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in @@ -119,23 +129,23 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.3.0 +pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv -pluggy==1.2.0 +pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4rc4 +proxy-py==2.4.4 # via -r requirements/test.in pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -pydantic==2.2.0 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.23.4 # via pydantic pygments==2.15.1 # via sphinx @@ -144,26 +154,32 @@ pyjwt==2.8.0 # gidgethub # pyjwt pyproject-hooks==1.0.0 
- # via build -pytest==7.4.3 + # via + # build + # pip-tools +pytest==8.3.2 # via # -r requirements/lint.in # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.67.0 - # via -r requirements/test.in +python-on-whales==0.72.0 + # via + # -r requirements/lint.in + # -r requirements/test.in +pytz==2023.3.post1 + # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.9.11 # via re-assert requests==2.31.0 # via @@ -174,7 +190,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.17.1 +slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx @@ -220,9 +236,8 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via - # -r requirements/typing-extensions.in # aioredis # annotated-types # mypy @@ -234,7 +249,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.0.4 # via requests -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via # -r requirements/base.in # -r requirements/lint.in @@ -246,8 +261,12 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.9.3 +yarl==1.13.0 # via -r requirements/runtime-deps.in +zipp==3.17.0 + # via + # importlib-metadata + # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9ee1518..5b84195 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/doc-spelling.txt --resolver=backtracking --strip-extras requirements/doc-spelling.in # -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index d9e7fb0..5a27f89 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/doc.txt --resolver=backtracking --strip-extras requirements/doc.in # -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx diff --git a/requirements/lint.in b/requirements/lint.in index 3461615..0d46809 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,8 +1,11 @@ --r typing-extensions.in - +aiodns aioredis +freezegun mypy; implementation_name == "cpython" pre-commit pytest +pytest-mock +python-on-whales slotscheck +trustme uvloop; platform_system != "Windows" diff --git a/requirements/lint.txt b/requirements/lint.txt index 28d0bf6..5f1b068 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -4,25 +4,47 @@ # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in # +aiodns==3.2.0 + # via -r requirements/lint.in aioredis==2.0.1 # via -r requirements/lint.in +annotated-types==0.7.0 + # via pydantic async-timeout==4.0.3 # via aioredis +certifi==2024.2.2 + # via requests +cffi==1.17.0 + # via + # cryptography + # pycares cfgv==3.3.1 # via pre-commit 
+charset-normalizer==3.3.2 + # via requests click==8.1.6 - # via slotscheck + # via + # slotscheck + # typer distlib==0.3.7 # via virtualenv exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv +freezegun==1.5.1 + # via -r requirements/lint.in identify==2.5.26 # via pre-commit +idna==3.7 + # via requests iniconfig==2.0.0 # via pytest -mypy==1.7.1 ; implementation_name == "cpython" +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy @@ -32,27 +54,60 @@ packaging==23.1 # via pytest platformdirs==3.10.0 # via virtualenv -pluggy==1.2.0 +pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -pytest==7.4.3 +pycares==4.4.0 + # via aiodns +pycparser==2.22 + # via cffi +pydantic==2.9.2 + # via python-on-whales +pydantic-core==2.23.4 + # via pydantic +pygments==2.17.2 + # via rich +pytest==8.3.2 + # via -r requirements/lint.in +pytest-mock==3.14.0 + # via -r requirements/lint.in +python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.17.1 +requests==2.31.0 + # via python-on-whales +rich==13.7.1 + # via typer +shellingham==1.5.4 + # via typer +slotscheck==0.19.0 # via -r requirements/lint.in tomli==2.0.1 # via # mypy # pytest # slotscheck -typing-extensions==4.7.1 +tqdm==4.66.2 + # via python-on-whales +trustme==1.1.0 + # via -r requirements/lint.in +typer==0.12.3 + # via python-on-whales +typing-extensions==4.12.2 # via - # -r requirements/typing-extensions.in # aioredis + # annotated-types # mypy -uvloop==0.19.0 ; platform_system != "Windows" + # pydantic + # pydantic-core + # python-on-whales + # rich + # typer +urllib3==2.2.1 + # via requests +uvloop==0.21.0b1 ; platform_system != "Windows" # via -r requirements/lint.in virtualenv==20.24.2 # via pre-commit diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 9c4f984..915f9c2 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.0.4 +multidict==6.0.5 # via -r requirements/multidict.in diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index b2df16f..9a19945 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,6 +1,7 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` -aiodns; sys_platform=="linux" or sys_platform=="darwin" +aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" +aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" attrs >= 17.3.0 @@ -8,4 +9,4 @@ Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 -yarl >= 1.0, < 2.0 +yarl >= 1.12.0, < 2.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a0f2aa8..a70b633 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,25 +4,27 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" + # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in 
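# --- Illustrative sketch (not part of the patch above) ----------------------
# runtime-deps.in now requires aiodns >= 3.2.0, the backend used by
# aiohttp.resolver.AsyncResolver on Linux/macOS. A minimal way to opt into
# the async resolver explicitly; the class and parameter names come from the
# public aiohttp API, while the URL is only a placeholder.
import asyncio

import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch_with_async_dns(url: str) -> int:
    resolver = AsyncResolver()  # needs aiodns installed (the "speedups" extra)
    connector = aiohttp.TCPConnector(resolver=resolver)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return resp.status


if __name__ == "__main__":
    print(asyncio.run(fetch_with_async_dns("https://example.org")))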
aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.15.1 +cffi==1.17.0 # via pycares -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal idna==3.4 # via yarl -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl @@ -30,5 +32,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.9.3 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 57c00fc..8be1e49 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,21 +4,23 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" + # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.17.0 # via # cryptography # pycares @@ -28,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.3.2 +coverage==7.6.0 # via # -r requirements/test.in # pytest-cov @@ -36,13 +38,13 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest -freezegun==1.3.0 +freezegun==1.5.1 # via -r requirements/test.in -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in idna==3.4 # via @@ -51,11 +53,11 @@ idna==3.4 # yarl iniconfig==2.0.0 # via pytest -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy @@ -63,34 +65,34 @@ packaging==23.1 # via # gunicorn # pytest -pluggy==1.2.0 +pluggy==1.5.0 # via pytest -proxy-py==2.4.4rc4 +proxy-py==2.4.4 # via -r requirements/test.in pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -pydantic==2.2.0 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.23.4 # via pydantic -pytest==7.4.3 +pytest==8.3.2 # via # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.67.0 +python-on-whales==0.72.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.9.11 # via re-assert requests==2.31.0 # via python-on-whales @@ -109,9 +111,8 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via - # -r requirements/typing-extensions.in # annotated-types # mypy # pydantic @@ 
-120,9 +121,9 @@ typing-extensions==4.7.1 # typer urllib3==2.0.4 # via requests -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.3 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/typing-extensions.in b/requirements/typing-extensions.in deleted file mode 100644 index 5fd4f05..0000000 --- a/requirements/typing-extensions.in +++ /dev/null @@ -1 +0,0 @@ -typing_extensions diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt deleted file mode 100644 index c45af72..0000000 --- a/requirements/typing-extensions.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in -# -typing-extensions==4.7.1 - # via -r requirements/typing-extensions.in diff --git a/setup.cfg b/setup.cfg index b5f5943..751d470 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,6 +38,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Topic :: Internet :: WWW/HTTP @@ -47,12 +48,13 @@ packages = aiohttp zip_safe = False include_package_data = True install_requires = + aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 - attrs >= 17.3.0 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" + attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 - yarl >= 1.0, < 2.0 + yarl >= 1.12.0, < 2.0 [options.exclude_package_data] * = @@ -61,7 +63,7 @@ install_requires = [options.extras_require] speedups = - aiodns; sys_platform=="linux" or sys_platform=="darwin" + aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' @@ -120,8 +122,6 @@ addopts = filterwarnings = error ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. 
https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning - ignore:unclosed transport :ResourceWarning ignore:Unclosed client session None: print("Stopping client and server") client.terminate() client.wait() - autobahn_container.stop() + # https://github.com/gabrieldemarmiesse/python-on-whales/pull/580 + autobahn_container.stop() # type: ignore[union-attr] failed_messages = get_failed_tests(f"{report_dir}/clients", "aiohttp") diff --git a/tests/conftest.py b/tests/conftest.py index 44e5fb7..85fcac9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,15 +1,20 @@ import asyncio +import base64 import os import socket import ssl import sys -from hashlib import md5, sha256 +from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory +from typing import Any +from unittest import mock from uuid import uuid4 import pytest +from aiohttp.client_proto import ResponseHandler +from aiohttp.http import WS_KEY from aiohttp.test_utils import loop_context try: @@ -167,6 +172,17 @@ def pipe_name(): return name +@pytest.fixture +def create_mocked_conn(loop: Any): + def _proto_factory(conn_closing_result=None, **kwargs): + proto = mock.create_autospec(ResponseHandler, **kwargs) + proto.closed = loop.create_future() + proto.closed.set_result(conn_closing_result) + return proto + + yield _proto_factory + + @pytest.fixture def selector_loop(): policy = asyncio.WindowsSelectorEventLoopPolicy() @@ -197,3 +213,28 @@ def netrc_contents( monkeypatch.setenv("NETRC", str(netrc_file_path)) return netrc_file_path + + +@pytest.fixture +def start_connection(): + with mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) as start_connection_mock: + yield start_connection_mock + + +@pytest.fixture +def key_data(): + return os.urandom(16) + + +@pytest.fixture +def key(key_data: Any): + return base64.b64encode(key_data) + + +@pytest.fixture +def ws_key(key: Any): + return base64.b64encode(sha1(key + WS_KEY).digest()).decode() diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py index 5163264..d513e9b 100644 --- a/tests/test_circular_imports.py +++ b/tests/test_circular_imports.py @@ -8,6 +8,7 @@ * https://github.com/pytest-dev/pytest/blob/d18c75b/testing/test_meta.py * https://twitter.com/codewithanthony/status/1229445110510735361 """ + import os import pkgutil import socket @@ -30,14 +31,16 @@ def _mark_aiohttp_worker_for_skipping( importables: List[str], ) -> List[Union[str, "ParameterSet"]]: return [ - pytest.param( - importable, - marks=pytest.mark.skipif( - not hasattr(socket, "AF_UNIX"), reason="It's a UNIX-only module" - ), + ( + pytest.param( + importable, + marks=pytest.mark.skipif( + not hasattr(socket, "AF_UNIX"), reason="It's a UNIX-only module" + ), + ) + if importable == "aiohttp.worker" + else importable ) - if importable == "aiohttp.worker" - else importable for importable in importables ] diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index f70ba5d..85e71a3 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -5,6 +5,8 @@ from unittest import mock import pytest +from multidict import CIMultiDict +from yarl import URL from aiohttp import client, client_reqrep @@ -43,7 +45,7 @@ def test_pickle(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(foo="bar"), ) err.foo = "bar" for proto in range(pickle.HIGHEST_PROTOCOL + 1): @@ -53,7 +55,8 @@ def 
test_pickle(self) -> None: assert err2.history == () assert err2.status == 400 assert err2.message == "Something wrong" - assert err2.headers == {} + # Use headers.get() to verify static type is correct. + assert err2.headers.get("foo") == "bar" assert err2.foo == "bar" def test_repr(self) -> None: @@ -65,11 +68,11 @@ def test_repr(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert repr(err) == ( "ClientResponseError(%r, (), status=400, " - "message='Something wrong', headers={})" % (self.request_info,) + "message='Something wrong', headers=)" % (self.request_info,) ) def test_str(self) -> None: @@ -78,7 +81,7 @@ def test_str(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert str(err) == ( "400, message='Something wrong', " "url='http://example.com'" @@ -298,8 +301,9 @@ def test_repr(self) -> None: class TestInvalidURL: def test_ctor(self) -> None: - err = client.InvalidURL(url=":wrong:url:") + err = client.InvalidURL(url=":wrong:url:", description=":description:") assert err.url == ":wrong:url:" + assert err.description == ":description:" def test_pickle(self) -> None: err = client.InvalidURL(url=":wrong:url:") @@ -310,10 +314,27 @@ def test_pickle(self) -> None: assert err2.url == ":wrong:url:" assert err2.foo == "bar" - def test_repr(self) -> None: + def test_repr_no_description(self) -> None: err = client.InvalidURL(url=":wrong:url:") + assert err.args == (":wrong:url:",) assert repr(err) == "" - def test_str(self) -> None: + def test_repr_yarl_URL(self) -> None: + err = client.InvalidURL(url=URL(":wrong:url:")) + assert repr(err) == "" + + def test_repr_with_description(self) -> None: + err = client.InvalidURL(url=":wrong:url:", description=":description:") + assert repr(err) == "" + + def test_str_no_description(self) -> None: err = client.InvalidURL(url=":wrong:url:") assert str(err) == ":wrong:url:" + + def test_none_description(self) -> None: + err = client.InvalidURL(":wrong:url:") + assert err.description is None + + def test_str_with_description(self) -> None: + err = client.InvalidURL(url=":wrong:url:", description=":description:") + assert str(err) == ":wrong:url: - :description:" diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index dbb2dff..30ceebd 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -8,7 +8,11 @@ import pathlib import socket import ssl -from typing import Any, AsyncIterator +import sys +import tarfile +import time +import zipfile +from typing import Any, AsyncIterator, Optional, Type from unittest import mock import pytest @@ -18,8 +22,20 @@ import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver -from aiohttp.client_exceptions import TooManyRedirects -from aiohttp.pytest_plugin import AiohttpClient, TestClient +from aiohttp.client_exceptions import ( + ClientResponseError, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + SocketTimeoutError, + TooManyRedirects, +) +from aiohttp.client_reqrep import ClientRequest +from aiohttp.connector import Connection +from aiohttp.http_writer import StreamWriter +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient from aiohttp.test_utils import unused_port @@ -214,6 +230,67 @@ async def handler(request): assert 0 == len(client._session.connector._conns) +async 
def test_keepalive_timeout_async_sleep() -> None: + async def handler(request): + body = await request.read() + assert b"" == body + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_route("GET", "/", handler) + + runner = web.AppRunner(app, tcp_keepalive=True, keepalive_timeout=0.001) + await runner.setup() + + port = unused_port() + site = web.TCPSite(runner, host="localhost", port=port) + await site.start() + + try: + async with aiohttp.client.ClientSession() as sess: + resp1 = await sess.get(f"http://localhost:{port}/") + await resp1.read() + # wait for server keepalive_timeout + await asyncio.sleep(0.01) + resp2 = await sess.get(f"http://localhost:{port}/") + await resp2.read() + finally: + await asyncio.gather(runner.shutdown(), site.stop()) + + +@pytest.mark.skipif( + sys.version_info[:2] == (3, 11), + reason="https://github.com/pytest-dev/pytest/issues/10763", +) +async def test_keepalive_timeout_sync_sleep() -> None: + async def handler(request): + body = await request.read() + assert b"" == body + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_route("GET", "/", handler) + + runner = web.AppRunner(app, tcp_keepalive=True, keepalive_timeout=0.001) + await runner.setup() + + port = unused_port() + site = web.TCPSite(runner, host="localhost", port=port) + await site.start() + + try: + async with aiohttp.client.ClientSession() as sess: + resp1 = await sess.get(f"http://localhost:{port}/") + await resp1.read() + # wait for server keepalive_timeout + # time.sleep is a more challenging scenario than asyncio.sleep + time.sleep(0.01) + resp2 = await sess.get(f"http://localhost:{port}/") + await resp2.read() + finally: + await asyncio.gather(runner.shutdown(), site.stop()) + + async def test_release_early(aiohttp_client) -> None: async def handler(request): await request.read() @@ -268,10 +345,11 @@ async def data_gen(): async with client.get("/") as resp: assert 200 == resp.status - # Connection should have been reused + # First connection should have been closed, otherwise server won't know if it + # received the full message. conns = next(iter(client.session.connector._conns.values())) assert len(conns) == 1 - assert conns[0][0] is conn + assert conns[0][0] is not conn async def test_stream_request_on_server_eof_nested(aiohttp_client) -> None: @@ -289,14 +367,21 @@ async def data_gen(): yield b"just data" await asyncio.sleep(0.1) + assert client.session.connector is not None async with client.put("/", data=data_gen()) as resp: + first_conn = next(iter(client.session.connector._acquired)) assert 200 == resp.status - async with client.get("/") as resp: - assert 200 == resp.status + + async with client.get("/") as resp2: + assert 200 == resp2.status # Should be 2 separate connections conns = next(iter(client.session.connector._conns.values())) - assert len(conns) == 2 + assert len(conns) == 1 + + assert first_conn is not None + assert not first_conn.is_connected() + assert first_conn is not conns[0][0] async def test_HTTP_304_WITH_BODY(aiohttp_client) -> None: @@ -440,6 +525,61 @@ async def handler(request): assert 200 == resp.status +async def test_post_data_zipfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a zip file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted zipfile member failed to match original data." 
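# --- Illustrative sketch (not part of the patch above) ----------------------
# The tests added in this hunk stream zipfile/tarfile members as a request
# body, covering the fix for file-like objects whose fileno() raises
# AttributeError instead of OSError. A minimal client-side equivalent of the
# tarfile case; the URL and member name are placeholders.
import asyncio
import io
import tarfile

import aiohttp


async def upload_tar_member(url: str) -> int:
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tf:
        payload = b"This is a tar file payload text file."
        info = tarfile.TarInfo(name="payload1.txt")
        info.size = len(payload)
        tf.addfile(tarinfo=info, fileobj=io.BytesIO(payload))

    buf.seek(0)
    tf = tarfile.open(fileobj=buf, mode="r:")
    member = tf.extractfile("payload1.txt")  # file-like without a usable fileno()
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=member) as resp:
            return resp.status


if __name__ == "__main__":
    print(asyncio.run(upload_tar_member("http://localhost:8080/")))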
+ return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with zipfile.ZipFile(file=buf, mode="w") as zf: + with zf.open("payload1.txt", mode="w") as zip_filelike_writing: + zip_filelike_writing.write(data) + + buf.seek(0) + zf = zipfile.ZipFile(file=buf, mode="r") + resp = await client.post("/", data=zf.open("payload1.txt")) + assert 200 == resp.status + + +async def test_post_data_tarfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a tar file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted tarfile member failed to match original data." + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode="w") as tf: + ti = tarfile.TarInfo(name="payload1.txt") + ti.size = len(data) + tf.addfile(tarinfo=ti, fileobj=io.BytesIO(data)) + + # Random-access tarfile. + buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r:") + resp = await client.post("/", data=tf.extractfile("payload1.txt")) + assert 200 == resp.status + + # Streaming tarfile. + buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r|") + for entry in tf: + resp = await client.post("/", data=tf.extractfile(entry)) + assert 200 == resp.status + + async def test_ssl_client( aiohttp_server, ssl_ctx, @@ -509,8 +649,6 @@ async def handler(request): async def test_format_task_get(aiohttp_server) -> None: - loop = asyncio.get_event_loop() - async def handler(request): return web.Response(body=b"OK") @@ -518,7 +656,7 @@ async def handler(request): app.router.add_route("GET", "/", handler) server = await aiohttp_server(app) client = aiohttp.ClientSession() - task = loop.create_task(client.get(server.make_url("/"))) + task = asyncio.create_task(client.get(server.make_url("/"))) assert f"{task}".startswith(" None: - async def handler_redirect(request): +async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: + """Test combining params with an existing query_string.""" + + async def handler(request: web.Request) -> web.Response: + assert request.rel_url.query_string == "q=abc&q=test&d=dog" + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 + + +@pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) +async def test_empty_params_and_query_string( + aiohttp_client: AiohttpClient, params: Any +) -> None: + """Test combining empty params with an existing query_string.""" + + async def handler(request: web.Request) -> web.Response: + assert request.rel_url.query_string == "q=abc" + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 + + +async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: + async def handler_redirect(request: web.Request) -> web.Response: return web.Response(status=301, headers={"Location": "/ok?a=redirect"}) async def handler_ok(request): @@ -658,7 +829,6 @@ async def handler(request): raw_headers = tuple((bytes(h), bytes(v)) for h, v in resp.raw_headers) assert raw_headers == ( 
(b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) @@ -691,7 +861,6 @@ async def handler(request): assert raw_headers == ( (b"X-Empty", b""), (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) @@ -1058,7 +1227,7 @@ async def redirect(request): app.router.add_get("/redirect", redirect) client = await aiohttp_client(app) - with pytest.raises(ValueError): + with pytest.raises(NonHttpUrlRedirectClientError): await client.get("/redirect") @@ -1333,8 +1502,44 @@ async def handler(request): assert 200 == resp.status -async def test_POST_DATA_DEFLATE(aiohttp_client) -> None: - async def handler(request): +@pytest.mark.parametrize("data", (None, b"")) +async def test_GET_DEFLATE( + aiohttp_client: AiohttpClient, data: Optional[bytes] +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.json_response({"ok": True}) + + write_mock = None + original_write_bytes = ClientRequest.write_bytes + + async def write_bytes( + self: ClientRequest, writer: StreamWriter, conn: Connection + ) -> None: + nonlocal write_mock + original_write = writer._write + + with mock.patch.object( + writer, "_write", autospec=True, spec_set=True, side_effect=original_write + ) as write_mock: + await original_write_bytes(self, writer, conn) + + with mock.patch.object(ClientRequest, "write_bytes", write_bytes): + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/", data=data, compress=True) as resp: + assert resp.status == 200 + content = await resp.json() + assert content == {"ok": True} + + assert write_mock is not None + # No chunks should have been sent for an empty body. 
+ write_mock.assert_not_called() + + +async def test_POST_DATA_DEFLATE(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: data = await request.post() return web.json_response(dict(data)) @@ -2392,6 +2597,132 @@ async def handler_redirect(request): await client.post("/", chunked=1024) +INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW = ( + # yarl.URL.__new__ raises ValueError + ("http://:/", "http://:/"), + ("http://example.org:non_int_port/", "http://example.org:non_int_port/"), +) + +INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN = ( + # # yarl.URL.origin raises ValueError + ("http:/", "http:///"), + ("http:/example.com", "http:///example.com"), + ("http:///example.com", "http:///example.com"), +) + +NON_HTTP_URL_WITH_ERROR_MESSAGE = ( + ("call:+380123456789", r"call:\+380123456789"), + ("skype:handle", "skype:handle"), + ("slack://instance/room", "slack://instance/room"), + ("steam:code", "steam:code"), + ("twitter://handle", "twitter://handle"), + ("bluesky://profile/d:i:d", "bluesky://profile/d:i:d"), +) + + +@pytest.mark.parametrize( + ("url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, InvalidUrlClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + ), + *( + (url, message, NonHttpUrlClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_and_non_http_url( + url: Any, error_message_url: Any, expected_exception_class: Any +) -> None: + async with aiohttp.ClientSession() as http_session: + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" + ): + await http_session.get(url) + + +@pytest.mark.parametrize( + ("invalid_redirect_url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlRedirectClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + + INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, NonHttpUrlRedirectClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_redirect_url( + aiohttp_client: Any, + invalid_redirect_url: Any, + error_message_url: str, + expected_exception_class: Any, +) -> None: + headers = {hdrs.LOCATION: invalid_redirect_url} + + async def generate_redirecting_response(request): + return web.Response(status=301, headers=headers) + + app = web.Application() + app.router.add_get("/redirect", generate_redirecting_response) + client = await aiohttp_client(app) + + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" 
+ ): + await client.get("/redirect") + + +@pytest.mark.parametrize( + ("invalid_redirect_url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlRedirectClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + + INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, NonHttpUrlRedirectClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_redirect_url_multiple_redirects( + aiohttp_client: Any, + invalid_redirect_url: Any, + error_message_url: str, + expected_exception_class: Any, +) -> None: + app = web.Application() + + for path, location in [ + ("/redirect", "/redirect1"), + ("/redirect1", "/redirect2"), + ("/redirect2", invalid_redirect_url), + ]: + + async def generate_redirecting_response(request): + return web.Response(status=301, headers={hdrs.LOCATION: location}) + + app.router.add_get(path, generate_redirecting_response) + + client = await aiohttp_client(app) + + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" + ): + await client.get("/redirect") + + @pytest.mark.parametrize( ("status", "expected_ok"), ( @@ -2785,7 +3116,38 @@ async def test_aiohttp_request_ctx_manager_not_found() -> None: assert False, "never executed" # pragma: no cover -async def test_yield_from_in_session_request(aiohttp_client) -> None: +async def test_raising_client_connector_dns_error_on_dns_failure() -> None: + """Verify that the exception raised when a DNS lookup fails is specific to DNS.""" + with mock.patch( + "aiohttp.connector.TCPConnector._resolve_host", autospec=True, spec_set=True + ) as mock_resolve_host: + mock_resolve_host.side_effect = OSError(None, "DNS lookup failed") + with pytest.raises(aiohttp.ClientConnectorDNSError, match="DNS lookup failed"): + async with aiohttp.request("GET", "http://wrong-dns-name.com"): + assert False, "never executed" + + +async def test_aiohttp_request_coroutine(aiohttp_server: AiohttpServer) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + not_an_awaitable = aiohttp.request("GET", server.make_url("/")) + with pytest.raises( + TypeError, + match="^object _SessionRequestContextManager " + "can't be used in 'await' expression$", + ): + await not_an_awaitable # type: ignore[misc] + + await not_an_awaitable._coro # coroutine 'ClientSession._request' was never awaited + await server.close() + + +async def test_yield_from_in_session_request(aiohttp_client: AiohttpClient) -> None: # a test for backward compatibility with yield from syntax async def handler(request): return web.Response() @@ -3001,21 +3363,20 @@ def connection_lost(self, exc): addr = server.sockets[0].getsockname() - connector = aiohttp.TCPConnector(limit=1) - session = aiohttp.ClientSession(connector=connector) + async with aiohttp.TCPConnector(limit=1) as connector: + async with aiohttp.ClientSession(connector=connector) as session: + url = "http://{}:{}/".format(*addr) - url = "http://{}:{}/".format(*addr) + r = await session.request("GET", url) + await r.read() + assert 1 == len(connector._conns) + closed_conn = next(iter(connector._conns.values())) - r = await session.request("GET", url) - await r.read() - assert 1 == len(connector._conns) + await session.request("GET", url) + assert 1 == len(connector._conns) + new_conn = next(iter(connector._conns.values())) + assert closed_conn is not 
new_conn - with pytest.raises(aiohttp.ClientConnectionError): - await session.request("GET", url) - assert 0 == len(connector._conns) - - await session.close() - await connector.close() server.close() await server.wait_closed() @@ -3133,8 +3494,44 @@ async def handler(request): await resp.read() -async def test_read_from_closed_content(aiohttp_client) -> None: - async def handler(request): +async def test_read_after_catch_raise_for_status(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"data", status=404) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + async with client.get("/") as resp: + with pytest.raises(ClientResponseError, match="404"): + # Should not release response when in async with context. + resp.raise_for_status() + + result = await resp.read() + assert result == b"data" + + +async def test_read_after_raise_outside_context(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"data", status=404) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + resp = await client.get("/") + with pytest.raises(ClientResponseError, match="404"): + # No async with, so should release and therefore read() will fail. + resp.raise_for_status() + + with pytest.raises(aiohttp.ClientConnectionError, match=r"^Connection closed$"): + await resp.read() + + +async def test_read_from_closed_content(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: return web.Response(body=b"data") app = web.Application() @@ -3164,6 +3561,21 @@ async def handler(request): await client.get("/") +async def test_socket_timeout(aiohttp_client: Any) -> None: + async def handler(request): + await asyncio.sleep(5) + return web.Response() + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + timeout = aiohttp.ClientTimeout(sock_read=0.1) + client = await aiohttp_client(app, timeout=timeout) + + with pytest.raises(SocketTimeoutError): + await client.get("/") + + async def test_read_timeout_closes_connection(aiohttp_client: AiohttpClient) -> None: request_count = 0 @@ -3387,9 +3799,10 @@ async def handler(request): assert resp.reason == "x" * 8191 -@pytest.mark.xfail(raises=asyncio.TimeoutError, reason="#7599") -async def test_rejected_upload(aiohttp_client, tmp_path) -> None: - async def ok_handler(request): +async def test_rejected_upload( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path +) -> None: + async def ok_handler(request: web.Request) -> web.Response: return web.Response() async def not_ok_handler(request): @@ -3406,10 +3819,50 @@ async def not_ok_handler(request): with open(file_path, "rb") as file: data = {"file": file} - async with await client.post("/not_ok", data=data) as resp_not_ok: - assert 400 == resp_not_ok.status + async with client.post("/not_ok", data=data) as resp_not_ok: + assert resp_not_ok.status == 400 + + async with client.get("/ok", timeout=aiohttp.ClientTimeout(total=1)) as resp_ok: + assert resp_ok.status == 200 + + +@pytest.mark.parametrize( + ("value", "exc_type"), + [(42, TypeError), ("InvalidUrl", InvalidURL)], +) +async def test_request_with_wrong_proxy( + aiohttp_client: AiohttpClient, value: Any, exc_type: Type[Exception] +) -> None: + app = web.Application() + session = await aiohttp_client(app) + + with pytest.raises(exc_type): + await 
session.get("/", proxy=value) # type: ignore[arg-type] + + +async def test_raise_for_status_is_none(aiohttp_client: AiohttpClient) -> None: + async def handler(_: web.Request) -> web.Response: + return web.Response() - async with await client.get( - "/ok", timeout=aiohttp.ClientTimeout(total=0.01) - ) as resp_ok: - assert 200 == resp_ok.status + app = web.Application() + app.router.add_get("/", handler) + session = await aiohttp_client(app, raise_for_status=None) # type: ignore[arg-type] + + await session.get("/") + + +async def test_exception_when_read_outside_of_session( + aiohttp_server: AiohttpServer, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"1" * 1000000) + + app = web.Application() + app.router.add_get("/", handler) + + server = await aiohttp_server(app) + async with aiohttp.ClientSession() as sess: + resp = await sess.get(server.make_url("/")) + + with pytest.raises(RuntimeError, match="Connection closed"): + await resp.read() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 6084f68..c9d61bf 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -2,6 +2,7 @@ import hashlib import io import pathlib +import sys import urllib.parse import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie @@ -13,7 +14,7 @@ from yarl import URL import aiohttp -from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp import BaseConnector, client_reqrep, hdrs, helpers, payload from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, @@ -279,9 +280,16 @@ def test_host_header_ipv4(make_request) -> None: assert req.headers["HOST"] == "127.0.0.2" -def test_host_header_ipv6(make_request) -> None: - req = make_request("get", "http://[::2]") - assert req.headers["HOST"] == "[::2]" +@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) +def test_host_header_ipv6(make_request, yarl_supports_host_subcomponent: bool) -> None: + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, + "_YARL_SUPPORTS_HOST_SUBCOMPONENT", + yarl_supports_host_subcomponent, + ): + req = make_request("get", "http://[::2]") + assert req.headers["HOST"] == "[::2]" def test_host_header_ipv4_with_port(make_request) -> None: @@ -453,6 +461,13 @@ def test_basic_auth_from_url(make_request) -> None: assert "python.org" == req.host +def test_basic_auth_no_user_from_url(make_request) -> None: + req = make_request("get", "http://:1234@python.org") + assert "AUTHORIZATION" in req.headers + assert "Basic OjEyMzQ=" == req.headers["AUTHORIZATION"] + assert "python.org" == req.host + + def test_basic_auth_from_url_overridden(make_request) -> None: req = make_request( "get", "http://garbage@python.org", auth=aiohttp.BasicAuth("nkim", "1234") @@ -996,8 +1011,15 @@ async def gen(): req = ClientRequest("POST", URL("http://python.org/"), data=gen(), loop=loop) assert req.chunked assert req.headers["TRANSFER-ENCODING"] == "chunked" + original_write_bytes = req.write_bytes - resp = await req.send(conn) + async def _mock_write_bytes(*args, **kwargs): + # Ensure the task is scheduled + await asyncio.sleep(0) + return await original_write_bytes(*args, **kwargs) + + with mock.patch.object(req, "write_bytes", _mock_write_bytes): + resp = await req.send(conn) assert asyncio.isfuture(req._writer) await resp.wait_for_close() assert req._writer is None @@ -1020,9 +1042,7 @@ async def gen(writer): assert 
req.headers["TRANSFER-ENCODING"] == "chunked" resp = await req.send(conn) - assert asyncio.isfuture(req._writer) await resp.wait_for_close() - assert req._writer is None assert ( buf.split(b"\r\n\r\n", 1)[1] == b"b\r\nbinary data\r\n7\r\n result\r\n0\r\n\r\n" ) @@ -1201,16 +1221,46 @@ async def test_oserror_on_write_bytes(loop, conn) -> None: await req.close() -async def test_terminate(loop, conn) -> None: +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Needs Task.cancelling()") +async def test_cancel_close(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) - resp = await req.send(conn) + req._writer = asyncio.Future() # type: ignore[assignment] + + t = asyncio.create_task(req.close()) + + # Start waiting on _writer + await asyncio.sleep(0) + + t.cancel() + # Cancellation should not be suppressed. + with pytest.raises(asyncio.CancelledError): + await t + + +async def test_terminate(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: + req = ClientRequest("get", URL("http://python.org"), loop=loop) + + async def _mock_write_bytes(*args, **kwargs): + # Ensure the task is scheduled + await asyncio.sleep(0) + + with mock.patch.object(req, "write_bytes", _mock_write_bytes): + resp = await req.send(conn) + assert req._writer is not None - writer = req._writer = WriterMock() + assert resp._writer is not None + await resp._writer + writer = WriterMock() + writer.done = mock.Mock(return_value=False) writer.cancel = mock.Mock() + req._writer = writer + resp._writer = writer + assert req._writer is not None + assert resp._writer is not None req.terminate() - assert req._writer is None writer.cancel.assert_called_with() + writer.done.assert_called_with() resp.close() await req.close() @@ -1222,9 +1272,19 @@ def test_terminate_with_closed_loop(loop, conn) -> None: async def go(): nonlocal req, resp, writer req = ClientRequest("get", URL("http://python.org")) - resp = await req.send(conn) + + async def _mock_write_bytes(*args, **kwargs): + # Ensure the task is scheduled + await asyncio.sleep(0) + + with mock.patch.object(req, "write_bytes", _mock_write_bytes): + resp = await req.send(conn) + assert req._writer is not None - writer = req._writer = WriterMock() + writer = WriterMock() + writer.done = mock.Mock(return_value=False) + req._writer = writer + resp._writer = writer await asyncio.sleep(0.05) @@ -1407,3 +1467,30 @@ def test_basicauth_from_empty_netrc( """Test that no Authorization header is sent when netrc is empty""" req = make_request("get", "http://example.com", trust_env=True) assert hdrs.AUTHORIZATION not in req.headers + + +async def test_connection_key_with_proxy() -> None: + """Verify the proxy headers are included in the ConnectionKey when a proxy is used.""" + proxy = URL("http://proxy.example.com") + req = ClientRequest( + "GET", + URL("http://example.com"), + proxy=proxy, + proxy_headers={"X-Proxy": "true"}, + loop=asyncio.get_running_loop(), + ) + assert req.connection_key.proxy_headers_hash is not None + await req.close() + + +async def test_connection_key_without_proxy() -> None: + """Verify the proxy headers are not included in the ConnectionKey when a proxy is used.""" + # If proxy is unspecified, proxy_headers should be ignored + req = ClientRequest( + "GET", + URL("http://example.com"), + proxy_headers={"X-Proxy": "true"}, + loop=asyncio.get_running_loop(), + ) + assert req.connection_key.proxy_headers_hash is None + await req.close() diff --git a/tests/test_client_response.py 
b/tests/test_client_response.py index 166089c..ede3950 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -6,7 +6,7 @@ from unittest import mock import pytest -from multidict import CIMultiDict +from multidict import CIMultiDict, CIMultiDictProxy from yarl import URL import aiohttp @@ -423,6 +423,36 @@ def side_effect(*args, **kwargs): assert response._connection is None +async def test_text_badly_encoded_encoding_header(loop, session) -> None: + session._resolve_charset = lambda *_: "utf-8" + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + + def side_effect(*args: object, **kwargs: object): + fut = loop.create_future() + fut.set_result(b"foo") + return fut + + h = {"Content-Type": "text/html; charset=\udc81gutf-8\udc81\udc8d"} + response._headers = CIMultiDictProxy(CIMultiDict(h)) + content = response.content = mock.Mock() + content.read.side_effect = side_effect + + await response.read() + encoding = response.get_encoding() + + assert encoding == "utf-8" + + async def test_text_custom_encoding(loop, session) -> None: response = ClientResponse( "get", @@ -659,11 +689,13 @@ async def test_json_invalid_content_type(loop, session) -> None: ) response._headers = {"Content-Type": "data/octet-stream"} response._body = b"" + response.status = 500 with pytest.raises(aiohttp.ContentTypeError) as info: await response.json() assert info.value.request_info == response.request_info + assert info.value.status == 500 async def test_json_no_content(loop, session) -> None: diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 416b6bb..dac05ae 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -4,8 +4,9 @@ import io import json from http.cookies import SimpleCookie -from typing import Any, List +from typing import Any, Awaitable, Callable, List from unittest import mock +from uuid import uuid4 import pytest from multidict import CIMultiDict, MultiDict @@ -15,10 +16,13 @@ import aiohttp from aiohttp import client, hdrs, web from aiohttp.client import ClientSession +from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ClientRequest -from aiohttp.connector import BaseConnector, TCPConnector +from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG +from aiohttp.http import RawResponseMessage from aiohttp.test_utils import make_mocked_coro +from aiohttp.tracing import Trace @pytest.fixture @@ -471,7 +475,124 @@ async def create_connection(req, traces, timeout): c.__del__() -async def test_cookie_jar_usage(loop, aiohttp_client) -> None: +@pytest.mark.parametrize("protocol", ["http", "https", "ws", "wss"]) +async def test_ws_connect_allowed_protocols( + create_session: Any, + create_mocked_conn: Any, + protocol: str, + ws_key: Any, + key_data: Any, +) -> None: + resp = mock.create_autospec(aiohttp.ClientResponse) + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + resp.url = URL(f"{protocol}://example") + resp.cookies = SimpleCookie() + resp.start = mock.AsyncMock() + + req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req_factory = mock.Mock(return_value=req) + req.send = mock.AsyncMock(return_value=resp) + # BaseConnector allows all high level protocols by default + 
connector = BaseConnector() + + session = await create_session(connector=connector, request_class=req_factory) + + connections = [] + original_connect = session._connector.connect + + async def connect(req, traces, timeout): + conn = await original_connect(req, traces, timeout) + connections.append(conn) + return conn + + async def create_connection(req, traces, timeout): + return create_mocked_conn() + + connector = session._connector + with mock.patch.object(connector, "connect", connect), mock.patch.object( + connector, "_create_connection", create_connection + ), mock.patch.object(connector, "_release"), mock.patch( + "aiohttp.client.os" + ) as m_os: + m_os.urandom.return_value = key_data + await session.ws_connect(f"{protocol}://example") + + # normally called during garbage collection. triggers an exception + # if the connection wasn't already closed + for c in connections: + c.close() + c.__del__() + + await session.close() + + +@pytest.mark.parametrize("protocol", ["http", "https", "ws", "wss", "unix"]) +async def test_ws_connect_unix_socket_allowed_protocols( + create_session: Callable[..., Awaitable[ClientSession]], + create_mocked_conn: Callable[[], ResponseHandler], + protocol: str, + ws_key: bytes, + key_data: bytes, +) -> None: + resp = mock.create_autospec(aiohttp.ClientResponse) + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + resp.url = URL(f"{protocol}://example") + resp.cookies = SimpleCookie() + resp.start = mock.AsyncMock() + + req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req_factory = mock.Mock(return_value=req) + req.send = mock.AsyncMock(return_value=resp) + # UnixConnector allows all high level protocols by default and unix sockets + session = await create_session( + connector=UnixConnector(path=""), request_class=req_factory + ) + + connections = [] + assert session._connector is not None + original_connect = session._connector.connect + + async def connect( + req: ClientRequest, traces: List[Trace], timeout: aiohttp.ClientTimeout + ) -> Connection: + conn = await original_connect(req, traces, timeout) + connections.append(conn) + return conn + + async def create_connection( + req: object, traces: object, timeout: object + ) -> ResponseHandler: + return create_mocked_conn() + + connector = session._connector + with mock.patch.object(connector, "connect", connect), mock.patch.object( + connector, "_create_connection", create_connection + ), mock.patch.object(connector, "_release"), mock.patch( + "aiohttp.client.os" + ) as m_os: + m_os.urandom.return_value = key_data + await session.ws_connect(f"{protocol}://example") + + # normally called during garbage collection. 
triggers an exception + # if the connection wasn't already closed + for c in connections: + c.close() + c.__del__() + + await session.close() + + +async def test_cookie_jar_usage(loop: Any, aiohttp_client: Any) -> None: req_url = None jar = mock.Mock() @@ -792,7 +913,9 @@ async def test_client_session_timeout_args(loop) -> None: with pytest.warns(DeprecationWarning): session2 = ClientSession(loop=loop, read_timeout=20 * 60, conn_timeout=30 * 60) - assert session2._timeout == client.ClientTimeout(total=20 * 60, connect=30 * 60) + assert session2._timeout == client.ClientTimeout( + total=20 * 60, connect=30 * 60, sock_connect=client.DEFAULT_TIMEOUT.sock_connect + ) with pytest.raises(ValueError): ClientSession( @@ -814,13 +937,31 @@ async def test_client_session_timeout_default_args(loop) -> None: await session1.close() -async def test_client_session_timeout_zero() -> None: +async def test_client_session_timeout_zero( + create_mocked_conn: Callable[[], ResponseHandler] +) -> None: + async def create_connection( + req: object, traces: object, timeout: object + ) -> ResponseHandler: + await asyncio.sleep(0.01) + conn = create_mocked_conn() + conn.connected = True # type: ignore[misc] + assert conn.transport is not None + conn.transport.is_closing.return_value = False # type: ignore[attr-defined] + msg = mock.create_autospec(RawResponseMessage, spec_set=True, code=200) + conn.read.return_value = (msg, mock.Mock()) # type: ignore[attr-defined] + return conn + timeout = client.ClientTimeout(total=10, connect=0, sock_connect=0, sock_read=0) - try: - async with ClientSession(timeout=timeout) as session: - await session.get("http://example.com") - except asyncio.TimeoutError: - pytest.fail("0 should disable timeout.") + async with ClientSession(timeout=timeout) as session: + with mock.patch.object( + session._connector, "_create_connection", create_connection + ): + try: + resp = await session.get("http://example.com") + except asyncio.TimeoutError: # pragma: no cover + pytest.fail("0 should disable timeout.") + resp.close() async def test_client_session_timeout_bad_argument() -> None: @@ -895,3 +1036,23 @@ async def test_instantiation_with_invalid_timeout_value(loop): ClientSession(timeout=1) # should not have "Unclosed client session" warning assert not logs + + +@pytest.mark.parametrize( + ("outer_name", "inner_name"), + [ + ("skip_auto_headers", "_skip_auto_headers"), + ("auth", "_default_auth"), + ("json_serialize", "_json_serialize"), + ("connector_owner", "_connector_owner"), + ("raise_for_status", "_raise_for_status"), + ("trust_env", "_trust_env"), + ("trace_configs", "_trace_configs"), + ], +) +async def test_properties( + session: ClientSession, outer_name: str, inner_name: str +) -> None: + value = uuid4() + setattr(session, inner_name, value) + assert value == getattr(session, outer_name) diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index f0b7757..ec08db0 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -2,33 +2,19 @@ import base64 import hashlib import os +from typing import Any, Type from unittest import mock import pytest import aiohttp -from aiohttp import client, hdrs +from aiohttp import ClientConnectionResetError, ServerDisconnectedError, client, hdrs from aiohttp.http import WS_KEY from aiohttp.streams import EofStream from aiohttp.test_utils import make_mocked_coro -@pytest.fixture -def key_data(): - return os.urandom(16) - - -@pytest.fixture -def key(key_data): - return base64.b64encode(key_data) - - -@pytest.fixture -def ws_key(key): - 
return base64.b64encode(hashlib.sha1(key + WS_KEY).digest()).decode() - - -async def test_ws_connect(ws_key, loop, key_data) -> None: +async def test_ws_connect(ws_key: Any, loop: Any, key_data: Any) -> None: resp = mock.Mock() resp.status = 101 resp.headers = { @@ -37,6 +23,7 @@ async def test_ws_connect(ws_key, loop, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -52,6 +39,97 @@ async def test_ws_connect(ws_key, loop, key_data) -> None: assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] +async def test_ws_connect_read_timeout_is_reset_to_inf( + ws_key: Any, loop: Any, key_data: Any +) -> None: + resp = mock.Mock() + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", + } + resp.connection.protocol.read_timeout = 0.5 + with mock.patch("aiohttp.client.os") as m_os, mock.patch( + "aiohttp.client.ClientSession.request" + ) as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(resp) + + res = await aiohttp.ClientSession().ws_connect( + "http://test.org", protocols=("t1", "t2", "chat") + ) + + assert isinstance(res, client.ClientWebSocketResponse) + assert res.protocol == "chat" + assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] + assert resp.connection.protocol.read_timeout is None + + +async def test_ws_connect_read_timeout_stays_inf( + ws_key: Any, loop: Any, key_data: Any +) -> None: + resp = mock.Mock() + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", + } + resp.connection.protocol.read_timeout = None + with mock.patch("aiohttp.client.os") as m_os, mock.patch( + "aiohttp.client.ClientSession.request" + ) as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(resp) + + res = await aiohttp.ClientSession().ws_connect( + "http://test.org", + protocols=("t1", "t2", "chat"), + receive_timeout=0.5, + ) + + assert isinstance(res, client.ClientWebSocketResponse) + assert res.protocol == "chat" + assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] + assert resp.connection.protocol.read_timeout is None + + +async def test_ws_connect_read_timeout_reset_to_max( + ws_key: Any, loop: Any, key_data: Any +) -> None: + resp = mock.Mock() + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", + } + resp.connection.protocol.read_timeout = 0.5 + with mock.patch("aiohttp.client.os") as m_os, mock.patch( + "aiohttp.client.ClientSession.request" + ) as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(resp) + + res = await aiohttp.ClientSession().ws_connect( + "http://test.org", + protocols=("t1", "t2", "chat"), + receive_timeout=1.0, + ) + + assert isinstance(res, client.ClientWebSocketResponse) + assert res.protocol == "chat" + assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] + assert resp.connection.protocol.read_timeout == 1.0 + + async def test_ws_connect_with_origin(key_data, 
loop) -> None: resp = mock.Mock() resp.status = 403 @@ -82,6 +160,7 @@ async def test_ws_connect_with_params(ws_key, loop, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -107,6 +186,7 @@ def read(self, decode=False): hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -229,6 +309,7 @@ async def mock_get(*args, **kwargs): hdrs.SEC_WEBSOCKET_ACCEPT: accept, hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } + resp.connection.protocol.read_timeout = None return resp with mock.patch("aiohttp.client.os") as m_os: @@ -259,6 +340,7 @@ async def test_close(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -299,6 +381,7 @@ async def test_close_eofstream(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -321,20 +404,56 @@ async def test_close_eofstream(loop, ws_key, key_data) -> None: await session.close() -async def test_close_exc(loop, ws_key, key_data) -> None: - resp = mock.Mock() - resp.status = 101 - resp.headers = { +async def test_close_connection_lost( + loop: asyncio.AbstractEventLoop, ws_key: bytes, key_data: bytes +) -> None: + """Test the websocket client handles the connection being closed out from under it.""" + mresp = mock.Mock(spec_set=client.ClientResponse) + mresp.status = 101 + mresp.headers = { hdrs.UPGRADE: "websocket", hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + mresp.connection.protocol.read_timeout = None + with mock.patch("aiohttp.client.WebSocketWriter"), mock.patch( + "aiohttp.client.os" + ) as m_os, mock.patch("aiohttp.client.ClientSession.request") as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(mresp) + + session = aiohttp.ClientSession() + resp = await session.ws_connect("http://test.org") + assert not resp.closed + + exc = ServerDisconnectedError() + resp._reader.set_exception(exc) + + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert resp.closed + + await session.close() + + +async def test_close_exc( + loop: asyncio.AbstractEventLoop, ws_key: bytes, key_data: bytes +) -> None: + mresp = mock.Mock() + mresp.status = 101 + mresp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + mresp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() - 
m_req.return_value.set_result(resp) + m_req.return_value.set_result(mresp) writer = mock.Mock() WebSocketWriter.return_value = writer writer.close = make_mocked_coro() @@ -361,6 +480,7 @@ async def test_close_exc2(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -387,7 +507,13 @@ async def test_close_exc2(loop, ws_key, key_data) -> None: await resp.close() -async def test_send_data_after_close(ws_key, key_data, loop) -> None: +@pytest.mark.parametrize("exc", (ClientConnectionResetError, ConnectionResetError)) +async def test_send_data_after_close( + exc: Type[Exception], + ws_key: bytes, + key_data: bytes, + loop: asyncio.AbstractEventLoop, +) -> None: resp = mock.Mock() resp.status = 101 resp.headers = { @@ -395,6 +521,7 @@ async def test_send_data_after_close(ws_key, key_data, loop) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -411,7 +538,7 @@ async def test_send_data_after_close(ws_key, key_data, loop) -> None: (resp.send_bytes, (b"b",)), (resp.send_json, ({},)), ): - with pytest.raises(ConnectionResetError): + with pytest.raises(exc): # Verify exc can be caught with both classes await meth(*args) @@ -423,6 +550,7 @@ async def test_send_data_type_errors(ws_key, key_data, loop) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -451,6 +579,7 @@ async def test_reader_read_exception(ws_key, key_data, loop) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + hresp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -515,6 +644,7 @@ async def test_ws_connect_non_overlapped_protocols(ws_key, loop, key_data) -> No hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "other,another", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -537,6 +667,7 @@ async def test_ws_connect_non_overlapped_protocols_2(ws_key, loop, key_data) -> hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "other,another", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -561,6 +692,7 @@ async def test_ws_connect_deflate(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -584,6 +716,7 @@ async 
def test_ws_connect_deflate_per_message(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -616,6 +749,7 @@ async def test_ws_connect_deflate_server_not_support(loop, ws_key, key_data) -> hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -640,6 +774,7 @@ async def test_ws_connect_deflate_notakeover(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate; " "client_no_context_takeover", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -664,6 +799,7 @@ async def test_ws_connect_deflate_client_wbits(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate; " "client_max_window_bits=10", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 6270675..0421fb9 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,11 +1,14 @@ import asyncio import sys +from typing import Any, NoReturn +from unittest import mock import pytest import aiohttp -from aiohttp import hdrs, web +from aiohttp import ClientConnectionResetError, ServerTimeoutError, WSMsgType, hdrs, web from aiohttp.http import WSCloseCode +from aiohttp.pytest_plugin import AiohttpClient if sys.version_info >= (3, 11): import asyncio as async_timeout @@ -245,7 +248,7 @@ async def handler(request): await client_ws.close() msg = await ws.receive() - assert msg.type == aiohttp.WSMsgType.CLOSE + assert msg.type is aiohttp.WSMsgType.CLOSE return ws app = web.Application() @@ -256,11 +259,43 @@ async def handler(request): await ws.send_bytes(b"ask") msg = await ws.receive() - assert msg.type == aiohttp.WSMsgType.CLOSING + assert msg.type is aiohttp.WSMsgType.CLOSING await asyncio.sleep(0.01) msg = await ws.receive() - assert msg.type == aiohttp.WSMsgType.CLOSED + assert msg.type is aiohttp.WSMsgType.CLOSED + + +async def test_concurrent_close_multiple_tasks(aiohttp_client: Any) -> None: + async def handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + await ws.receive_bytes() + await ws.send_str("test") + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + ws = await client.ws_connect("/") + + await ws.send_bytes(b"ask") + + task1 = asyncio.create_task(ws.close()) + task2 = asyncio.create_task(ws.close()) + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + + await task1 + await task2 + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED async def test_concurrent_task_close(aiohttp_client) -> None: @@ -565,7 +600,38 @@ async def handler(request): assert 
ping_received -async def test_heartbeat_no_pong(aiohttp_client) -> None: +async def test_heartbeat_connection_closed(aiohttp_client: AiohttpClient) -> None: + """Test that the connection is closed while ping is in progress.""" + + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + ping_count = 0 + # We patch write here to simulate a connection reset error + # since if we closed the connection normally, the client would + # cancel the heartbeat task and we wouldn't get a ping + assert resp._conn is not None + with mock.patch.object( + resp._conn.transport, "write", side_effect=ClientConnectionResetError + ), mock.patch.object(resp._writer, "ping", wraps=resp._writer.ping) as ping: + await resp.receive() + ping_count = ping.call_count + # Connection should be closed roughly after 1.5x heartbeat. + await asyncio.sleep(0.2) + assert ping_count == 1 + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong(aiohttp_client: AiohttpClient) -> None: + """Test that the connection is closed if no pong is received without sending messages.""" ping_received = False async def handler(request): @@ -590,8 +656,155 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE -async def test_send_recv_compress(aiohttp_client) -> None: +async def test_heartbeat_no_pong_after_receive_many_messages( + aiohttp_client: AiohttpClient, +) -> None: + """Test that the connection is closed if no pong is received after receiving many messages.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + for _ in range(5): + await ws.send_str("test") + await asyncio.sleep(0.05) + for _ in range(5): + await ws.send_str("test") + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + + for _ in range(10): + test_msg = await resp.receive() + assert test_msg.data == "test" + # Connection should be closed roughly after 1.5x heartbeat.
+ + await asyncio.sleep(0.2) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong_after_send_many_messages( + aiohttp_client: AiohttpClient, +) -> None: + """Test that the connection is closed if no pong is received after sending many messages.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + for _ in range(10): + msg = await ws.receive() + assert msg.data == "test" + assert msg.type is aiohttp.WSMsgType.TEXT + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + + for _ in range(5): + await resp.send_str("test") + await asyncio.sleep(0.05) + for _ in range(5): + await resp.send_str("test") + # Connection should be closed roughly after 1.5x heartbeat. + await asyncio.sleep(0.2) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong_concurrent_receive( + aiohttp_client: AiohttpClient, +) -> None: + ping_received = False + async def handler(request): + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + ws._reader.feed_eof = lambda: None + await asyncio.sleep(10.0) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + resp._reader.feed_eof = lambda: None + + # Connection should be closed roughly after 1.5x heartbeat. 
+ msg = await resp.receive(5.0) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + assert msg + assert msg.type is WSMsgType.ERROR + assert isinstance(msg.data, ServerTimeoutError) + + +async def test_close_websocket_while_ping_inflight( + aiohttp_client: AiohttpClient, +) -> None: + """Test closing the websocket while a ping is in-flight.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.BINARY + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + await resp.send_bytes(b"ask") + + cancelled = False + ping_started = False + + async def delayed_ping() -> None: + nonlocal cancelled, ping_started + ping_started = True + try: + await asyncio.sleep(1) + except asyncio.CancelledError: + cancelled = True + raise + + with mock.patch.object(resp._writer, "ping", delayed_ping): + await asyncio.sleep(0.1) + + await resp.close() + await asyncio.sleep(0) + assert ping_started is True + assert cancelled is True + + +async def test_send_recv_compress(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) @@ -835,3 +1048,11 @@ async def handler(request): assert "answer" == msg.data await resp.close() + + +async def test_ws_connect_with_wrong_ssl_type(aiohttp_client: AiohttpClient) -> None: + app = web.Application() + session = await aiohttp_client(app) + + with pytest.raises(TypeError, match="ssl should be SSLContext, .*"): + await session.ws_connect("/", ssl=42) diff --git a/tests/test_connector.py b/tests/test_connector.py index 02e48bc..94eeb3c 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,5 +1,4 @@ # Tests of http client with custom Connector - import asyncio import gc import hashlib @@ -9,18 +8,33 @@ import sys import uuid from collections import deque -from contextlib import closing +from concurrent import futures +from contextlib import closing, suppress +from typing import Any, List, Literal, Optional, Sequence, Tuple from unittest import mock import pytest +from aiohappyeyeballs import AddrInfoType from yarl import URL import aiohttp -from aiohttp import client, web -from aiohttp.client import ClientRequest, ClientTimeout +from aiohttp import ( + ClientRequest, + ClientTimeout, + client, + connector as connector_module, + web, +) +from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ConnectionKey -from aiohttp.connector import Connection, TCPConnector, _DNSCacheTable -from aiohttp.locks import EventResultOrError +from aiohttp.connector import ( + _SSL_CONTEXT_UNVERIFIED, + _SSL_CONTEXT_VERIFIED, + Connection, + TCPConnector, + _DNSCacheTable, +) +from aiohttp.resolver import ResolveResult from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -539,7 +553,9 @@ async def test__drop_acquire_per_host3(loop) -> None: assert conn._acquired_per_host[123] == {789} -async def test_tcp_connector_certificate_error(loop) -> None: +async def test_tcp_connector_certificate_error( + loop: Any, start_connection: mock.AsyncMock +) -> None: req =
ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) async def certificate_error(*args, **kwargs): @@ -556,8 +572,10 @@ async def certificate_error(*args, **kwargs): assert isinstance(ctx.value, aiohttp.ClientSSLError) -async def test_tcp_connector_server_hostname_default(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test_tcp_connector_server_hostname_default( + loop: Any, start_connection: mock.AsyncMock +) -> None: + conn = aiohttp.TCPConnector() with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -570,8 +588,10 @@ async def test_tcp_connector_server_hostname_default(loop) -> None: assert create_connection.call_args.kwargs["server_hostname"] == "127.0.0.1" -async def test_tcp_connector_server_hostname_override(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test_tcp_connector_server_hostname_override( + loop: Any, start_connection: mock.AsyncMock +) -> None: + conn = aiohttp.TCPConnector() with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -595,6 +615,7 @@ async def test_tcp_connector_multiple_hosts_errors(loop) -> None: ip4 = "192.168.1.4" ip5 = "192.168.1.5" ips = [ip1, ip2, ip3, ip4, ip5] + addrs_tried = [] ips_tried = [] fingerprint = hashlib.sha256(b"foo").digest() @@ -624,11 +645,24 @@ async def _resolve_host(host, port, traces=None): os_error = certificate_error = ssl_error = fingerprint_error = False connected = False + async def start_connection(*args, **kwargs): + addr_infos: List[AddrInfoType] = kwargs["addr_infos"] + + first_addr_info = addr_infos[0] + first_addr_info_addr = first_addr_info[-1] + addrs_tried.append(first_addr_info_addr) + + mock_socket = mock.create_autospec(socket.socket, spec_set=True, instance=True) + mock_socket.getpeername.return_value = first_addr_info_addr + return mock_socket + async def create_connection(*args, **kwargs): nonlocal os_error, certificate_error, ssl_error, fingerprint_error nonlocal connected - ip = args[1] + sock = kwargs["sock"] + addr_info = sock.getpeername() + ip = addr_info[0] ips_tried.append(ip) @@ -645,6 +679,12 @@ async def create_connection(*args, **kwargs): raise ssl.SSLError if ip == ip4: + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. + sock.close() + fingerprint_error = True tr, pr = mock.Mock(), mock.Mock() @@ -660,12 +700,21 @@ def get_extra_info(param): if param == "peername": return ("192.168.1.5", 12345) + if param == "socket": + return sock + assert False, param tr.get_extra_info = get_extra_info return tr, pr if ip == ip5: + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + connected = True tr, pr = mock.Mock(), mock.Mock() @@ -685,10 +734,21 @@ def get_extra_info(param): assert False - conn._loop.create_connection = create_connection - - established_connection = await conn.connect(req, [], ClientTimeout()) - assert ips == ips_tried + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert ips_tried == ips + assert addrs_tried == [(ip, 443) for ip in ips] assert os_error assert certificate_error @@ -699,8 +759,329 @@ def get_extra_info(param): established_connection.close() -async def test_tcp_connector_resolve_host(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True) +@pytest.mark.parametrize( + ("happy_eyeballs_delay"), + [0.1, 0.25, None], +) +async def test_tcp_connector_happy_eyeballs( + loop: Any, happy_eyeballs_delay: Optional[float] +) -> None: + conn = aiohttp.TCPConnector(happy_eyeballs_delay=happy_eyeballs_delay) + + ip1 = "dead::beef::" + ip2 = "192.168.1.1" + ips = [ip1, ip2] + addrs_tried = [] + + req = ClientRequest( + "GET", + URL("https://mocked.host"), + loop=loop, + ) + + async def _resolve_host(host, port, traces=None): + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + conn._resolve_host = _resolve_host + + os_error = False + connected = False + + async def sock_connect(*args, **kwargs): + addr = args[1] + nonlocal os_error + + addrs_tried.append(addr) + + if addr[0] == ip1: + os_error = True + raise OSError + + async def create_connection(*args, **kwargs): + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + + nonlocal connected + connected = True + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + conn._loop.sock_connect = sock_connect + conn._loop.create_connection = create_connection + + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert addrs_tried == [(ip1, 443, 0, 0), (ip2, 443)] + + assert os_error + assert connected + + established_connection.close() + + +async def test_tcp_connector_interleave(loop: Any) -> None: + conn = aiohttp.TCPConnector(interleave=2) + + ip1 = "192.168.1.1" + ip2 = "192.168.1.2" + ip3 = "dead::beef::" + ip4 = "aaaa::beef::" + ip5 = "192.168.1.5" + ips = [ip1, ip2, ip3, ip4, ip5] + success_ips = [] + interleave = None + + req = ClientRequest( + "GET", + URL("https://mocked.host"), + loop=loop, + ) + + async def _resolve_host(host, port, traces=None): + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + conn._resolve_host = _resolve_host + + async def start_connection(*args, **kwargs): + nonlocal interleave + addr_infos: List[AddrInfoType] = kwargs["addr_infos"] + interleave = kwargs["interleave"] + # Mock the 4th host connecting successfully + fourth_addr_info = addr_infos[3] + fourth_addr_info_addr = fourth_addr_info[-1] + mock_socket = mock.create_autospec(socket.socket, spec_set=True, instance=True) + mock_socket.getpeername.return_value = fourth_addr_info_addr + return mock_socket + + async def create_connection(*args, **kwargs): + sock = kwargs["sock"] + addr_info = sock.getpeername() + ip = addr_info[0] + + success_ips.append(ip) + + sock: socket.socket = kwargs["sock"] + # Close the socket since we are not actually connecting + # and we don't want to leak it. + sock.close() + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert success_ips == [ip4] + assert interleave == 2 + established_connection.close() + + +async def test_tcp_connector_family_is_respected(loop: Any) -> None: + conn = aiohttp.TCPConnector(family=socket.AF_INET) + + ip1 = "dead::beef::" + ip2 = "192.168.1.1" + ips = [ip1, ip2] + addrs_tried = [] + + req = ClientRequest( + "GET", + URL("https://mocked.host"), + loop=loop, + ) + + async def _resolve_host(host, port, traces=None): + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + conn._resolve_host = _resolve_host + connected = False + + async def sock_connect(*args, **kwargs): + addr = args[1] + addrs_tried.append(addr) + + async def create_connection(*args, **kwargs): + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + + nonlocal connected + connected = True + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + conn._loop.sock_connect = sock_connect + conn._loop.create_connection = create_connection + + established_connection = await conn.connect(req, [], ClientTimeout()) + + # We should only try the IPv4 address since we specified + # the family to be AF_INET + assert addrs_tried == [(ip2, 443)] + + assert connected + + established_connection.close() + + +@pytest.mark.parametrize( + ("request_url"), + [ + ("http://mocked.host"), + ("https://mocked.host"), + ], +) +async def test_tcp_connector_multiple_hosts_one_timeout( + loop: asyncio.AbstractEventLoop, + request_url: str, +) -> None: + conn = aiohttp.TCPConnector() + + ip1 = "192.168.1.1" + ip2 = "192.168.1.2" + ips = [ip1, ip2] + ips_tried = [] + ips_success = [] + timeout_error = False + connected = False + + req = ClientRequest( + "GET", + URL(request_url), + loop=loop, + ) + + async def _resolve_host( + host: str, port: int, traces: object = None + ) -> List[ResolveResult]: + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + async def start_connection( + addr_infos: Sequence[AddrInfoType], + *, + interleave: Optional[int] = None, + **kwargs: object, + ) -> socket.socket: + nonlocal timeout_error + + addr_info = addr_infos[0] + addr_info_addr = addr_info[-1] + + ip = addr_info_addr[0] + ips_tried.append(ip) + + if ip == ip1: + timeout_error = True + raise asyncio.TimeoutError + + if ip == ip2: + mock_socket = mock.create_autospec( + socket.socket, spec_set=True, instance=True + ) + mock_socket.getpeername.return_value = addr_info_addr + return mock_socket # type: ignore[no-any-return] + + assert False + + async def create_connection( + *args: object, sock: Optional[socket.socket] = None, **kwargs: object + ) -> Tuple[ResponseHandler, ResponseHandler]: + nonlocal connected + + assert isinstance(sock, socket.socket) + addr_info = sock.getpeername() + ip = addr_info[0] + ips_success.append(ip) + connected = True + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert ips_tried == ips + assert ips_success == [ip2] + + assert timeout_error + assert connected + + established_connection.close() + + +async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.TCPConnector(use_dns_cache=True) res = await conn._resolve_host("localhost", 8080) assert res @@ -723,6 +1104,7 @@ def dns_response(loop): async def coro(): # simulates a network operation await asyncio.sleep(0) + await asyncio.sleep(0) return ["127.0.0.1"] return coro @@ -1233,7 +1615,19 @@ async def test_tcp_connector_ctor() -> None: assert conn.family == 0 -async def test_tcp_connector_ctor_fingerprint_valid(loop) -> None: +async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.TCPConnector() + assert conn.allowed_protocol_schema_set == {"", "tcp", "http", "https", "ws", "wss"} + + +async def test_invalid_ssl_param() -> None: + with pytest.raises(TypeError): + aiohttp.TCPConnector(ssl=object()) # type: ignore[arg-type] + + +async def test_tcp_connector_ctor_fingerprint_valid( + loop: asyncio.AbstractEventLoop, +) -> None: valid = aiohttp.Fingerprint(hashlib.sha256(b"foo").digest()) conn = aiohttp.TCPConnector(ssl=valid, loop=loop) assert conn._ssl is valid @@ -1280,20 +1674,8 @@ async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn.clear_dns_cache("localhost") -async def test_dont_recreate_ssl_context(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - ctx = conn._make_ssl_context(True) - assert ctx is conn._make_ssl_context(True) - - -async def test_dont_recreate_ssl_context2(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - ctx = conn._make_ssl_context(False) - assert ctx is conn._make_ssl_context(False) - - -async def test___get_ssl_context1(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test___get_ssl_context1() -> None: + conn = aiohttp.TCPConnector() req = mock.Mock() req.is_ssl.return_value = False assert conn._get_ssl_context(req) is None @@ -1323,7 +1705,7 @@ async def test___get_ssl_context4(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = False - assert conn._get_ssl_context(req) is conn._make_ssl_context(False) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED async def test___get_ssl_context5(loop) -> None: @@ -1332,15 +1714,29 @@ async def test___get_ssl_context5(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) - assert conn._get_ssl_context(req) is conn._make_ssl_context(False) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED -async def test___get_ssl_context6(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test___get_ssl_context6() -> None: + conn = aiohttp.TCPConnector() req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert conn._get_ssl_context(req) is conn._make_ssl_context(True) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + + +async def 
test_ssl_context_once() -> None: + """Test the ssl context is created only once and shared between connectors.""" + conn1 = aiohttp.TCPConnector() + conn2 = aiohttp.TCPConnector() + conn3 = aiohttp.TCPConnector() + + req = mock.Mock() + req.is_ssl.return_value = True + req.ssl = True + assert conn1._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + assert conn2._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + assert conn3._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED async def test_close_twice(loop) -> None: @@ -1367,7 +1763,336 @@ async def test_close_cancels_cleanup_handle(loop) -> None: assert conn._cleanup_handle is None -async def test_close_abort_closed_transports(loop) -> None: +async def test_close_cancels_resolve_host(loop: asyncio.AbstractEventLoop) -> None: + cancelled = False + + async def delay_resolve(*args: object, **kwargs: object) -> None: + """Delay resolve() task in order to test cancellation.""" + nonlocal cancelled + try: + await asyncio.sleep(10) + except asyncio.CancelledError: + cancelled = True + raise + + conn = aiohttp.TCPConnector() + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve): + t = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # We now have a task being tracked and can ensure that .close() cancels it. + assert len(conn._resolve_host_tasks) == 1 + await conn.close() + await asyncio.sleep(0.01) + assert cancelled + assert len(conn._resolve_host_tasks) == 0 + + with suppress(asyncio.CancelledError): + await t + + +async def test_multiple_dns_resolution_requests_success( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that multiple DNS resolution requests are handled correctly.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task1 + + # Verify the task is finished + assert len(conn._resolve_host_tasks) == 0 + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + +async def test_multiple_dns_resolution_requests_failure( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that DNS resolution failure for
multiple requests is handled correctly.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + raise OSError(None, "DNS Resolution mock failure") + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task1 + + # Verify the task is finished + assert len(conn._resolve_host_tasks) == 0 + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task3 + + +async def test_multiple_dns_resolution_requests_cancelled( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that DNS resolution cancellation does not affect other tasks.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + raise OSError(None, "DNS Resolution mock failure") + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + task1.cancel() + with pytest.raises(asyncio.CancelledError): + await task1 + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task3 + + # Verify the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_multiple_dns_resolution_requests_first_cancelled( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that first DNS resolution cancellation does not make other resolutions fail.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn =
aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + task1.cancel() + with pytest.raises(asyncio.CancelledError): + await task1 + + # The second and third tasks should still make the connection + # even if the first one is cancelled + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + # Verify the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_multiple_dns_resolution_requests_first_fails_second_successful( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that first DNS resolution fails the first time and is successful the second time.""" + attempt = 0 + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + nonlocal attempt + for _ in range(3): + await asyncio.sleep(0) + attempt += 1 + if attempt == 1: + raise OSError(None, "DNS Resolution mock failure") + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task1 + + assert len(conn._resolve_host_tasks) == 0 + # The second task should also get the dns resolution failure + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + + # The third task is created after the resolution finished so + # it should try again and succeed + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + # Verify the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_close_abort_closed_transports(loop:
asyncio.AbstractEventLoop) -> None: tr = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) @@ -1391,8 +2116,23 @@ async def test_ctor_with_default_loop(loop) -> None: assert loop is conn._loop -async def test_connect_with_limit(loop, key) -> None: - proto = mock.Mock() +async def test_base_connector_allows_high_level_protocols( + loop: asyncio.AbstractEventLoop, +) -> None: + conn = aiohttp.BaseConnector() + assert conn.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + } + + +async def test_connect_with_limit( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + proto = create_mocked_conn(loop) proto.is_connected.return_value = True req = ClientRequest( @@ -2047,7 +2787,8 @@ async def handler(request): session = aiohttp.ClientSession(connector=conn) url = srv.make_url("/") - with pytest.raises(aiohttp.ClientConnectorCertificateError) as ctx: + err = aiohttp.ClientConnectorCertificateError + with pytest.raises(err) as ctx: await session.get(url) assert isinstance(ctx.value, aiohttp.ClientConnectorCertificateError) @@ -2163,6 +2904,14 @@ async def handler(request): connector = aiohttp.UnixConnector(unix_sockname) assert unix_sockname == connector.path + assert connector.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + "unix", + } session = client.ClientSession(connector=connector) r = await session.get(url) @@ -2188,6 +2937,14 @@ async def handler(request): connector = aiohttp.NamedPipeConnector(pipe_name) assert pipe_name == connector.path + assert connector.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + "npipe", + } session = client.ClientSession(connector=connector) r = await session.get(url) @@ -2300,14 +3057,18 @@ async def test_connector_throttle_trace_race(loop): key = ("", 0) token = object() - class DummyTracer: - async def send_dns_cache_hit(self, *args, **kwargs): - event = connector._throttle_dns_events.pop(key) - event.set() + class DummyTracer(Trace): + def __init__(self) -> None: + """Dummy""" + + async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: + futures = connector._throttle_dns_futures.pop(key) + for fut in futures: + fut.set_result(None) connector._cached_hosts.add(key, [token]) connector = TCPConnector() - connector._throttle_dns_events[key] = EventResultOrError(loop) + connector._throttle_dns_futures[key] = set() traces = [DummyTracer()] assert await connector._resolve_host("", 0, traces) == [token] @@ -2351,3 +3112,42 @@ async def allow_connection_and_add_dummy_waiter(): ) await connector.close() + + +def test_connector_multiple_event_loop() -> None: + """Test the connector with multiple event loops.""" + + async def async_connect() -> Literal[True]: + conn = aiohttp.TCPConnector() + loop = asyncio.get_running_loop() + req = ClientRequest("GET", URL("https://127.0.0.1"), loop=loop) + with suppress(aiohttp.ClientConnectorError): + with mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=ssl.CertificateError, + ): + await conn.connect(req, [], ClientTimeout()) + return True + + def test_connect() -> Literal[True]: + loop = asyncio.new_event_loop() + try: + return loop.run_until_complete(async_connect()) + finally: + loop.close() + + with futures.ThreadPoolExecutor() as executor: + res_list = [executor.submit(test_connect) for _ in range(2)] + raw_response_list = [res.result() for res in futures.as_completed(res_list)] + + assert raw_response_list == [True, True] + + +def 
test_default_ssl_context_creation_without_ssl() -> None: + """Verify _make_ssl_context does not raise when ssl is not available.""" + with mock.patch.object(connector_module, "ssl", None): + assert connector_module._make_ssl_context(False) is None + assert connector_module._make_ssl_context(True) is None diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 9c60895..248d0d4 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -1,10 +1,12 @@ import asyncio import datetime +import heapq import itertools import pathlib import pickle import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie +from operator import not_ from unittest import mock import pytest @@ -153,28 +155,6 @@ def test_domain_matching() -> None: assert not test_func("test.com", "127.0.0.1") -def test_path_matching() -> None: - test_func = CookieJar._is_path_match - - assert test_func("/", "") - assert test_func("", "/") - assert test_func("/file", "") - assert test_func("/folder/file", "") - assert test_func("/", "/") - assert test_func("/file", "/") - assert test_func("/file", "/file") - assert test_func("/folder/", "/folder/") - assert test_func("/folder/", "/") - assert test_func("/folder/file", "/") - - assert not test_func("/", "/file") - assert not test_func("/", "/folder/") - assert not test_func("/file", "/folder/file") - assert not test_func("/folder/", "/folder/file") - assert not test_func("/different-file", "/file") - assert not test_func("/different-folder/", "/folder/") - - async def test_constructor(loop, cookies_to_send, cookies_to_receive) -> None: jar = CookieJar(loop=loop) jar.update_cookies(cookies_to_send) @@ -243,8 +223,98 @@ async def test_filter_cookie_with_unicode_domain(loop) -> None: assert len(jar.filter_cookies(URL("http://xn--9caa.com"))) == 1 -async def test_domain_filter_ip_cookie_send(loop) -> None: - jar = CookieJar(loop=loop) +@pytest.mark.parametrize( + ("url", "expected_cookies"), + ( + ( + "http://pathtest.com/one/two/", + ( + "no-path-cookie", + "path1-cookie", + "path2-cookie", + "shared-cookie", + "path3-cookie", + "path4-cookie", + ), + ), + ( + "http://pathtest.com/one/two", + ( + "no-path-cookie", + "path1-cookie", + "path2-cookie", + "shared-cookie", + "path3-cookie", + ), + ), + ( + "http://pathtest.com/one/two/three/", + ( + "no-path-cookie", + "path1-cookie", + "path2-cookie", + "shared-cookie", + "path3-cookie", + "path4-cookie", + ), + ), + ( + "http://test1.example.com/", + ( + "shared-cookie", + "domain-cookie", + "subdomain1-cookie", + "dotted-domain-cookie", + ), + ), + ( + "http://pathtest.com/", + ( + "shared-cookie", + "no-path-cookie", + "path1-cookie", + ), + ), + ), +) +async def test_filter_cookies_with_domain_path_lookup_multilevelpath( + loop, + url, + expected_cookies, +) -> None: + jar = CookieJar() + cookies = SimpleCookie( + "shared-cookie=first; " + "domain-cookie=second; Domain=example.com; " + "subdomain1-cookie=third; Domain=test1.example.com; " + "subdomain2-cookie=fourth; Domain=test2.example.com; " + "dotted-domain-cookie=fifth; Domain=.example.com; " + "different-domain-cookie=sixth; Domain=different.org; " + "secure-cookie=seventh; Domain=secure.com; Secure; " + "no-path-cookie=eighth; Domain=pathtest.com; " + "path1-cookie=ninth; Domain=pathtest.com; Path=/; " + "path2-cookie=tenth; Domain=pathtest.com; Path=/one; " + "path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; " + "path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; " + "expires-cookie=thirteenth; Domain=expirestest.com; Path=/;" 
+ " Expires=Tue, 1 Jan 1980 12:00:00 GMT; " + "max-age-cookie=fourteenth; Domain=maxagetest.com; Path=/;" + " Max-Age=60; " + "invalid-max-age-cookie=fifteenth; Domain=invalid-values.com; " + " Max-Age=string; " + "invalid-expires-cookie=sixteenth; Domain=invalid-values.com; " + " Expires=string;" + ) + jar.update_cookies(cookies) + cookies = jar.filter_cookies(URL(url)) + + assert len(cookies) == len(expected_cookies) + for c in cookies: + assert c in expected_cookies + + +async def test_domain_filter_ip_cookie_send() -> None: + jar = CookieJar() cookies = SimpleCookie( "shared-cookie=first; " "domain-cookie=second; Domain=example.com; " @@ -486,11 +556,11 @@ def test_domain_filter_diff_host(self) -> None: def test_domain_filter_host_only(self) -> None: self.jar.update_cookies(self.cookies_to_receive, URL("http://example.com/")) + sub_cookie = SimpleCookie("subdomain=spam; Path=/;") + self.jar.update_cookies(sub_cookie, URL("http://foo.example.com/")) - cookies_sent = self.jar.filter_cookies(URL("http://example.com/")) - self.assertIn("unconstrained-cookie", set(cookies_sent.keys())) - - cookies_sent = self.jar.filter_cookies(URL("http://different.org/")) + cookies_sent = self.jar.filter_cookies(URL("http://foo.example.com/")) + self.assertIn("subdomain", set(cookies_sent.keys())) self.assertNotIn("unconstrained-cookie", set(cookies_sent.keys())) def test_secure_filter(self) -> None: @@ -779,12 +849,197 @@ async def test_cookie_jar_clear_expired(): with freeze_time("1980-01-01"): sut.update_cookies(cookie) - sut.clear(lambda x: False) + for _ in range(2): + sut.clear(not_) + with freeze_time("1980-01-01"): + assert len(sut) == 0 + + +async def test_cookie_jar_expired_changes() -> None: + """Test that expire time changes are handled as expected.""" + jar = CookieJar() + + cookie_eleven_am = SimpleCookie() + cookie_eleven_am["foo"] = "bar" + cookie_eleven_am["foo"]["expires"] = "Tue, 1 Jan 1990 11:00:00 GMT" + + cookie_noon = SimpleCookie() + cookie_noon["foo"] = "bar" + cookie_noon["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" + + cookie_one_pm = SimpleCookie() + cookie_one_pm["foo"] = "bar" + cookie_one_pm["foo"]["expires"] = "Tue, 1 Jan 1990 13:00:00 GMT" + + cookie_two_pm = SimpleCookie() + cookie_two_pm["foo"] = "bar" + cookie_two_pm["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 10:00:00+00:00") + jar.update_cookies(cookie_noon) + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_eleven_am) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_one_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_two_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + freezer.move_to("1990-01-01 13:00:00+00:00") + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + freezer.move_to("1990-01-01 14:00:00+00:00") + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 0 + + +async def test_cookie_jar_duplicates_with_expire_heap() -> None: + """Test that duplicate cookies do not grow the expires heap.""" + jar = CookieJar() + + cookie_eleven_am = 
SimpleCookie() + cookie_eleven_am["foo"] = "bar" + cookie_eleven_am["foo"]["expires"] = "Tue, 1 Jan 1990 11:00:00 GMT" + + cookie_two_pm = SimpleCookie() + cookie_two_pm["foo"] = "bar" + cookie_two_pm["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 10:00:00+00:00") + + for _ in range(10): + jar.update_cookies(cookie_eleven_am) + + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + assert len(jar._expire_heap) == 1 + + freezer.move_to("1990-01-01 16:00:00+00:00") + jar.update_cookies(cookie_two_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 0 + assert len(jar._expire_heap) == 0 + + +async def test_cookie_jar_filter_cookies_expires() -> None: + """Test that calling filter_cookies will expire stale cookies.""" + jar = CookieJar() + assert len(jar) == 0 + + cookie = SimpleCookie() + + cookie["foo"] = "bar" + cookie["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" + with freeze_time("1980-01-01"): - assert len(sut) == 0 + jar.update_cookies(cookie) + + assert len(jar) == 1 + + # filter_cookies should expire stale cookies + jar.filter_cookies(URL("http://any.com/")) + + assert len(jar) == 0 + + +async def test_cookie_jar_heap_cleanup() -> None: + """Test that the heap gets cleaned up when there are many old expirations.""" + jar = CookieJar() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_cookies_to_cleanup = 100 + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 09:00:00+00:00") + + start_time = datetime.datetime( + 1990, 1, 1, 10, 0, 0, tzinfo=datetime.timezone.utc + ) + for i in range(min_cookies_to_cleanup): + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["expires"] = ( + start_time + datetime.timedelta(seconds=i) + ).strftime("%a, %d %b %Y %H:%M:%S GMT") + jar.update_cookies(cookie) + assert len(jar._expire_heap) == i + 1 + + assert len(jar._expire_heap) == min_cookies_to_cleanup + + # Now that we reached the minimum number of cookies to cleanup, + # add one more cookie to trigger the cleanup + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["expires"] = ( + start_time + datetime.timedelta(seconds=i + 1) + ).strftime("%a, %d %b %Y %H:%M:%S GMT") + jar.update_cookies(cookie) + + # Verify that the heap has been cleaned up + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + # The heap should have been cleaned up + assert len(jar._expire_heap) == 1 + + +async def test_cookie_jar_heap_maintains_order_after_cleanup() -> None: + """Test that order is maintained after cleanup.""" + jar = CookieJar() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_cookies_to_cleanup = 100 + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 09:00:00+00:00") + + for hour in (12, 13): + for i in range(min_cookies_to_cleanup): + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["domain"] = f"example{i}.com" + cookie["foo"]["expires"] = f"Tue, 1 Jan 1990 {hour}:00:00 GMT" + jar.update_cookies(cookie) + + # Get the jar into a state where the next cookie will trigger the cleanup + assert len(jar._expire_heap) == min_cookies_to_cleanup * 2 + assert len(jar._expirations) == min_cookies_to_cleanup + + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["domain"] = 
"example0.com" + cookie["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + jar.update_cookies(cookie) + + assert len(jar) == 100 + # The heap should have been cleaned up + assert len(jar._expire_heap) == 100 + + # Verify that the heap is still ordered + heap_before = jar._expire_heap.copy() + heapq.heapify(jar._expire_heap) + assert heap_before == jar._expire_heap -async def test_cookie_jar_clear_domain(): +async def test_cookie_jar_clear_domain() -> None: sut = CookieJar() cookie = SimpleCookie() cookie["foo"] = "bar" @@ -825,7 +1080,7 @@ async def test_pickle_format(cookies_to_send) -> None: with file_path.open("wb") as f: pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) """ - pickled = b"\x80\x05\x95\xc5\x07\x00\x00\x00\x00\x00\x00\x8c\x0bcollections\x94\x8c\x0bdefaultdict\x94\x93\x94\x8c\x0chttp.cookies\x94\x8c\x0cSimpleCookie\x94\x93\x94\x85\x94R\x94(\x8c\x00\x94\x8c\x01/\x94\x86\x94h\x05)\x81\x94\x8c\rshared-cookie\x94h\x03\x8c\x06Morsel\x94\x93\x94)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\t\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x08\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(\x8c\x03key\x94h\x0c\x8c\x05value\x94\x8c\x05first\x94\x8c\x0bcoded_value\x94h\x1cubs\x8c\x0bexample.com\x94h\t\x86\x94h\x05)\x81\x94(\x8c\rdomain-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h\x1eh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah!h\x1b\x8c\x06second\x94h\x1dh$ub\x8c\x14dotted-domain-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13\x8c\x0bexample.com\x94h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah%h\x1b\x8c\x05fifth\x94h\x1dh)ubu\x8c\x11test1.example.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x11subdomain1-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h*h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah-h\x1b\x8c\x05third\x94h\x1dh0ubs\x8c\x11test2.example.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x11subdomain2-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h1h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah4h\x1b\x8c\x06fourth\x94h\x1dh7ubs\x8c\rdifferent.org\x94h\t\x86\x94h\x05)\x81\x94\x8c\x17different-domain-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h8h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah;h\x1b\x8c\x05sixth\x94h\x1dh>ubs\x8c\nsecure.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\rsecure-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h?h\x14h\x08h\x15\x88h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahBh\x1b\x8c\x07seventh\x94h\x1dhEubs\x8c\x0cpathtest.com\x94h\t\x86\x94h\x05)\x81\x94(\x8c\x0eno-path-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13hFh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahIh\x1b\x8c\x06eighth\x94h\x1dhLub\x8c\x0cpath1-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13\x8c\x0cpathtest.com\x94h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahMh\x1b\x8c\x05ninth\x94h\x1dhQubu\x8c\x0cpathtest.com\x94\x8c\x04/one\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath2-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11hSh\x12h\x08h\x13hRh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahVh\x1b\x8c\x05tenth\x94h\x1dhYubs\x8c\x0cpathtest.com\x94\x8c\x08/one/two\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath3-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h[h\x12h\x08h\x13hZh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah^h\x1b\x8c\x08eleventh\x94h\x1dhaubs\x8c\x0cpathtest.com\x94\x8c\t/one/two/\x94\x86\x94h\x05)\x81\x94
\x8c\x0cpath4-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11hch\x12h\x08h\x13hbh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahfh\x1b\x8c\x07twelfth\x94h\x1dhiubs\x8c\x0fexpirestest.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x0eexpires-cookie\x94h\x0e)\x81\x94(h\x10\x8c\x1cTue, 1 Jan 2999 12:00:00 GMT\x94h\x11h\th\x12h\x08h\x13hjh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahmh\x1b\x8c\nthirteenth\x94h\x1dhqubs\x8c\x0emaxagetest.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x0emax-age-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13hrh\x14\x8c\x0260\x94h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahuh\x1b\x8c\nfourteenth\x94h\x1dhyubs\x8c\x12invalid-values.com\x94h\t\x86\x94h\x05)\x81\x94(\x8c\x16invalid-max-age-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13hzh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah}h\x1b\x8c\tfifteenth\x94h\x1dh\x80ub\x8c\x16invalid-expires-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13\x8c\x12invalid-values.com\x94h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah\x81h\x1b\x8c\tsixteenth\x94h\x1dh\x85ubuu." + pickled = b"\x80\x04\x95\xc8\x0b\x00\x00\x00\x00\x00\x00\x8c\x0bcollections\x94\x8c\x0bdefaultdict\x94\x93\x94\x8c\x0chttp.cookies\x94\x8c\x0cSimpleCookie\x94\x93\x94\x85\x94R\x94(\x8c\x00\x94h\x08\x86\x94h\x05)\x81\x94\x8c\rshared-cookie\x94h\x03\x8c\x06Morsel\x94\x93\x94)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94\x8c\x01/\x94\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x08\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(\x8c\x03key\x94h\x0b\x8c\x05value\x94\x8c\x05first\x94\x8c\x0bcoded_value\x94h\x1cubs\x8c\x0bexample.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\rdomain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x1e\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah!h\x1b\x8c\x06second\x94h\x1dh-ub\x8c\x14dotted-domain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0bexample.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah.h\x1b\x8c\x05fifth\x94h\x1dh;ubu\x8c\x11test1.example.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x11subdomain1-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h<\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah?h\x1b\x8c\x05third\x94h\x1dhKubs\x8c\x11test2.example.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x11subdomain2-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94hL\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ahOh\x1b\x8c\x06fourth\x94h\x1dh[ubs\x8c\rdifferent.org\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x17different-domain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\\\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah_h\x1b\x8c\x05sixth\x94h\x1dhkubs\x8c\nsecure.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\rsecure-cookie\x94h\r)\x81\x94(\x8c\x07expire
s\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94hl\x8c\x07max-age\x94h\x08\x8c\x06secure\x94\x88\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ahoh\x1b\x8c\x07seventh\x94h\x1dh{ubs\x8c\x0cpathtest.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\x0eno-path-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h|\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x7fh\x1b\x8c\x06eighth\x94h\x1dh\x8bub\x8c\x0cpath1-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0cpathtest.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x8ch\x1b\x8c\x05ninth\x94h\x1dh\x99ubu\x8c\x0cpathtest.com\x94\x8c\x04/one\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath2-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x9b\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x9a\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x9eh\x1b\x8c\x05tenth\x94h\x1dh\xaaubs\x8c\x0cpathtest.com\x94\x8c\x08/one/two\x94\x86\x94h\x05)\x81\x94(\x8c\x0cpath3-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\xac\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xab\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xafh\x1b\x8c\x08eleventh\x94h\x1dh\xbbub\x8c\x0cpath4-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94\x8c\t/one/two/\x94\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0cpathtest.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xbch\x1b\x8c\x07twelfth\x94h\x1dh\xcaubu\x8c\x0fexpirestest.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x0eexpires-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94\x8c\x1cTue, 1 Jan 2999 12:00:00 GMT\x94\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xcb\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xceh\x1b\x8c\nthirteenth\x94h\x1dh\xdbubs\x8c\x0emaxagetest.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x0emax-age-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xdc\x8c\x07max-age\x94\x8c\x0260\x94\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xdfh\x1b\x8c\nfourteenth\x94h\x1dh\xecubs\x8c\x12invalid-values.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\x16invalid-max-age-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xed\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xf0h\x1b\x8c\tfifteenth\x94h\x1dh\xfcub\x8c\x16invalid-expires-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x12invalid-values.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xfdh\x1b\x8c\tsixteenth\x94h\x1dj\n\x01\x00\x00ubuu." 
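# Hedged aside (not part of the diff): the replaced fixture blob starts with
# \x80\x05 (pickle protocol 5) while the new one starts with \x80\x04
# (protocol 4). For protocol >= 2 the first byte is the PROTO opcode and the
# second byte is the protocol number, so it can be read directly; a minimal
# sketch:
import pickle

def pickle_protocol(blob: bytes) -> int:
    assert blob[:1] == b"\x80", "expected a PROTO opcode"
    return blob[1]

assert pickle_protocol(pickle.dumps({}, protocol=4)) == 4
assert pickle_protocol(pickle.dumps({}, protocol=5)) == 5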
cookies = pickle.loads(pickled) cj = CookieJar() diff --git a/tests/test_formdata.py b/tests/test_formdata.py index 4bb8aa0..db1a386 100644 --- a/tests/test_formdata.py +++ b/tests/test_formdata.py @@ -1,3 +1,4 @@ +import io from unittest import mock import pytest @@ -46,6 +47,16 @@ def test_invalid_formdata_params2() -> None: FormData("as") # 2-char str is not allowed +async def test_formdata_textio_charset(buf: bytearray, writer) -> None: + form = FormData() + body = io.TextIOWrapper(io.BytesIO(b"\xe6\x97\xa5\xe6\x9c\xac"), encoding="utf-8") + form.add_field("foo", body, content_type="text/plain; charset=shift-jis") + payload = form() + await payload.write(writer) + assert b"charset=shift-jis" in buf + assert b"\x93\xfa\x96{" in buf + + def test_invalid_formdata_content_type() -> None: form = FormData() invalid_vals = [0, 0.1, {}, [], b"foo"] diff --git a/tests/test_helpers.py b/tests/test_helpers.py index b59528d..6c752ce 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -189,6 +189,20 @@ def test_basic_auth_from_url() -> None: assert auth.password == "pass" +def test_basic_auth_no_user_from_url() -> None: + url = URL("http://:pass@example.com") + auth = helpers.BasicAuth.from_url(url) + assert auth is not None + assert auth.login == "" + assert auth.password == "pass" + + +def test_basic_auth_no_auth_from_url() -> None: + url = URL("http://example.com") + auth = helpers.BasicAuth.from_url(url) + assert auth is None + + def test_basic_auth_from_not_url() -> None: with pytest.raises(TypeError): helpers.BasicAuth.from_url("http://user:pass@example.com") @@ -259,14 +273,6 @@ def test_is_ip_address() -> None: assert not helpers.is_ip_address("localhost") assert not helpers.is_ip_address("www.example.com") - # Out of range - assert not helpers.is_ip_address("999.999.999.999") - # Contain a port - assert not helpers.is_ip_address("127.0.0.1:80") - assert not helpers.is_ip_address("[2001:db8:0:1]:80") - # Too many "::" - assert not helpers.is_ip_address("1200::AB00:1234::2552:7777:1313") - def test_is_ip_address_bytes() -> None: assert helpers.is_ip_address(b"127.0.0.1") @@ -277,14 +283,6 @@ def test_is_ip_address_bytes() -> None: assert not helpers.is_ip_address(b"localhost") assert not helpers.is_ip_address(b"www.example.com") - # Out of range - assert not helpers.is_ip_address(b"999.999.999.999") - # Contain a port - assert not helpers.is_ip_address(b"127.0.0.1:80") - assert not helpers.is_ip_address(b"[2001:db8:0:1]:80") - # Too many "::" - assert not helpers.is_ip_address(b"1200::AB00:1234::2552:7777:1313") - def test_ipv4_addresses() -> None: ip_addresses = [ @@ -332,6 +330,18 @@ def test_is_ip_address_invalid_type() -> None: with pytest.raises(TypeError): helpers.is_ip_address(object()) + with pytest.raises(TypeError): + helpers.is_ipv4_address(123) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv4_address(object()) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv6_address(123) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv6_address(object()) # type: ignore[arg-type] + # ----------------------------------- TimeoutHandle ------------------- @@ -393,7 +403,61 @@ def test_timer_context_not_cancelled() -> None: assert not m_asyncio.current_task.return_value.cancel.called -def test_timer_context_no_task(loop) -> None: +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()" +) +async def test_timer_context_timeout_does_not_leak_upward() -> None: 
+ """Verify that the TimerContext does not leak cancellation outside the context manager.""" + loop = asyncio.get_running_loop() + ctx = helpers.TimerContext(loop) + current_task = asyncio.current_task() + assert current_task is not None + with pytest.raises(asyncio.TimeoutError): + with ctx: + assert current_task.cancelling() == 0 + loop.call_soon(ctx.timeout) + await asyncio.sleep(1) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()" +) +async def test_timer_context_timeout_does_swallow_cancellation() -> None: + """Verify that the TimerContext does not swallow cancellation.""" + loop = asyncio.get_running_loop() + current_task = asyncio.current_task() + assert current_task is not None + ctx = helpers.TimerContext(loop) + + async def task_with_timeout() -> None: + nonlocal ctx + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): + with ctx: + assert new_task.cancelling() == 0 + await asyncio.sleep(1) + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + ctx.timeout() + + # Cancellation should not leak into the current task + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + +def test_timer_context_no_task(loop: asyncio.AbstractEventLoop) -> None: with pytest.raises(RuntimeError): with helpers.TimerContext(loop): pass @@ -607,18 +671,6 @@ def test_proxies_from_env_http_with_auth(url_input, expected_scheme) -> None: assert proxy_auth.encoding == "latin1" -# ------------ get_running_loop --------------------------------- - - -def test_get_running_loop_not_running(loop) -> None: - with pytest.warns(DeprecationWarning): - helpers.get_running_loop() - - -async def test_get_running_loop_ok(loop) -> None: - assert helpers.get_running_loop() is loop - - # --------------------- get_env_proxy_for_url ------------------------------ diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index ee7dc4a..09f4f07 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -17,6 +17,7 @@ NO_EXTENSIONS, DeflateBuffer, HttpPayloadParser, + HttpRequestParser, HttpRequestParserPy, HttpResponseParserPy, HttpVersion, @@ -84,6 +85,7 @@ def response(loop: Any, protocol: Any, request: Any): max_line_size=8190, max_headers=32768, max_field_size=8190, + read_until_eof=True, ) @@ -294,9 +296,20 @@ def test_parse_headers_longline(parser: Any) -> None: parser.feed_data(text) +@pytest.fixture +def xfail_c_parser_status(request) -> None: + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + raises=http_exceptions.BadStatusLine, + ) + ) + + +@pytest.mark.usefixtures("xfail_c_parser_status") def test_parse_unusual_request_line(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") text = b"#smol //a HTTP/1.3\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 @@ -503,6 +516,23 @@ def test_request_te_chunked123(parser: Any) -> None: parser.feed_data(text) +async def test_request_te_last_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + assert await messages[0][1].read() == b"Test" + + +def test_request_te_first_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked, not\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + with pytest.raises( + http_exceptions.BadHttpMessage, + match="nvalid `Transfer-Encoding`", + ): + parser.feed_data(text) + + def test_conn_upgrade(parser: Any) -> None: text = ( b"GET /test HTTP/1.1\r\n" @@ -632,9 +662,6 @@ def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> if pad1 == pad2 == b"" and hdr != b"": # one entry in param matrix is correct: non-empty name, not padded expectation = nullcontext() - if pad1 == pad2 == hdr == b"": - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") with expectation: parser.feed_data(text) @@ -800,7 +827,15 @@ def test_http_request_bad_status_line_whitespace(parser: Any) -> None: parser.feed_data(text) -def test_http_request_upgrade(parser: Any) -> None: +def test_http_request_message_after_close(parser: HttpRequestParser) -> None: + text = b"GET / HTTP/1.1\r\nConnection: close\r\n\r\nInvalid\r\n\r\n" + with pytest.raises( + http_exceptions.BadHttpMessage, match="Data after `Connection: close`" + ): + parser.feed_data(text) + + +def test_http_request_upgrade(parser: HttpRequestParser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"connection: upgrade\r\n" @@ -815,9 +850,40 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +async def test_http_request_upgrade_unknown(parser: Any) -> None: + text = ( + b"POST / HTTP/1.1\r\n" + b"Connection: Upgrade\r\n" + b"Content-Length: 2\r\n" + b"Upgrade: unknown\r\n" + b"Content-Type: application/json\r\n\r\n" + b"{}" + ) + messages, upgrade, tail = parser.feed_data(text) + + msg = messages[0][0] + assert not msg.should_close + assert msg.upgrade + assert not upgrade + assert not msg.chunked + assert tail == b"" + assert await messages[0][-1].read() == b"{}" + + +@pytest.fixture +def xfail_c_parser_url(request) -> None: + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + raises=http_exceptions.InvalidURLError, + ) + ) + + +@pytest.mark.usefixtures("xfail_c_parser_url") def test_http_request_parser_utf8_request_line(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") messages, upgrade, tail = parser.feed_data( # note the truncated unicode sequence b"GET /P\xc3\xbcnktchen\xa0\xef\xb7 HTTP/1.1\r\n" + @@ -837,7 +903,9 @@ def test_http_request_parser_utf8_request_line(parser) -> None: assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url.path == URL("/P%C3%BCnktchen\udca0\udcef\udcb7").path + # python HTTP parser depends on Cython and CPython URL to match + # .. but yarl.URL("/abs") is not equal to URL.build(path="/abs"), see #6409 + assert msg.url == URL.build(path="/Pünktchen\udca0\udcef\udcb7", encoded=True) def test_http_request_parser_utf8(parser) -> None: @@ -1113,6 +1181,23 @@ async def test_http_response_parser_bad_chunked_strict_c(loop, protocol) -> None response.feed_data(text) +async def test_http_response_parser_notchunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: notchunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + response.feed_eof() + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + + +async def test_http_response_parser_last_chunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"Test" + + def test_http_response_parser_bad(response) -> None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTT/1\r\n\r\n") @@ -1209,8 +1294,8 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None: assert payload.is_eof() -def _test_parse_no_length_or_te_on_post(loop, protocol, request_cls): - parser = request_cls(protocol, loop, readall=True) +def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: Any): + parser = request_cls(protocol, loop, limit=2**16) text = b"POST /test HTTP/1.1\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1369,6 +1454,29 @@ def test_parse_chunked_payload_empty_body_than_another_chunked( assert b"second" == b"".join(d for d in payload._buffer) +async def test_parse_chunked_payload_split_chunks(response: Any) -> None: + network_chunks = ( + b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n", + b"5\r\nfi", + b"rst", + # This simulates a bug in lax mode caused when the \r\n separator, before the + # next HTTP chunk, appears at the start of the next network chunk. 
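# Hedged aside (not part of the diff): reassembled, the network_chunks above
# form one standard chunked body -- each chunk is "<size-in-hex>\r\n<data>\r\n"
# and the body ends with a zero-size chunk. frame_chunked() below is an
# illustrative helper, not an aiohttp API:
def frame_chunked(*parts: bytes) -> bytes:
    framed = b"".join(b"%x\r\n%s\r\n" % (len(p), p) for p in parts)
    return framed + b"0\r\n\r\n"

assert (
    frame_chunked(b"first", b"second")
    == b"5\r\nfirst\r\n6\r\nsecond\r\n0\r\n\r\n"
)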
+ b"\r\n", + b"6", + b"\r", + b"\n", + b"second\r", + b"\n0\r\n\r\n", + ) + reader = response.feed_data(network_chunks[0])[0][0][1] + for c in network_chunks[1:]: + response.feed_data(c) + + assert response.feed_eof() is None + assert reader.is_eof() + assert await reader.read() == b"firstsecond" + + def test_partial_url(parser: Any) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 @@ -1454,29 +1562,16 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol): class TestParsePayload: async def test_parse_eof_payload(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) - p = HttpPayloadParser(out, readall=True) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + p = HttpPayloadParser(out) p.feed_data(b"data") p.feed_eof() assert out.is_eof() assert [(bytearray(b"data"), 4)] == list(out._buffer) - async def test_parse_no_body(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) - p = HttpPayloadParser(out, method="PUT") - - assert out.is_eof() - assert p.done - async def test_parse_length_payload_eof(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=4) p.feed_data(b"da") @@ -1485,9 +1580,7 @@ async def test_parse_length_payload_eof(self, stream) -> None: p.feed_eof() async def test_parse_chunked_payload_size_error(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, chunked=True) with pytest.raises(http_exceptions.TransferEncodingError): p.feed_data(b"blah\r\n") @@ -1550,9 +1643,7 @@ async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None assert b"asdf" == b"".join(out._buffer) async def test_http_payload_parser_length(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=2) eof, tail = p.feed_data(b"1245") assert eof @@ -1565,9 +1656,7 @@ async def test_http_payload_parser_deflate(self, stream) -> None: COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) assert b"data" == b"".join(d for d, _ in out._buffer) @@ -1579,9 +1668,7 @@ async def test_http_payload_parser_deflate_no_hdrs(self, stream: Any) -> None: COMPRESSED = b"KI,I\x04\x00" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) assert b"data" == b"".join(d for d, _ in out._buffer) @@ -1592,19 +1679,15 @@ async def test_http_payload_parser_deflate_light(self, stream) -> None: COMPRESSED = b"\x18\x95KI,I\x04\x00\x04\x00\x01\x9b" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue( - 
stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) assert b"data" == b"".join(d for d, _ in out._buffer) assert out.is_eof() async def test_http_payload_parser_deflate_split(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) - p = HttpPayloadParser(out, compression="deflate", readall=True) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + p = HttpPayloadParser(out, compression="deflate") # Feeding one correct byte should be enough to choose exact # deflate decompressor p.feed_data(b"x", 1) @@ -1613,10 +1696,8 @@ async def test_http_payload_parser_deflate_split(self, stream) -> None: assert b"data" == b"".join(d for d, _ in out._buffer) async def test_http_payload_parser_deflate_split_err(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) - p = HttpPayloadParser(out, compression="deflate", readall=True) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + p = HttpPayloadParser(out, compression="deflate") # Feeding one wrong byte should be enough to choose exact # deflate decompressor p.feed_data(b"K", 1) @@ -1625,9 +1706,7 @@ async def test_http_payload_parser_deflate_split_err(self, stream) -> None: assert b"data" == b"".join(d for d, _ in out._buffer) async def test_http_payload_parser_length_zero(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=0) assert p.done assert out.is_eof() @@ -1635,9 +1714,7 @@ async def test_http_payload_parser_length_zero(self, stream) -> None: @pytest.mark.skipif(brotli is None, reason="brotli is not installed") async def test_http_payload_brotli(self, stream) -> None: compressed = brotli.compress(b"brotli data") - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=len(compressed), compression="br") p.feed_data(compressed) assert b"brotli data" == b"".join(d for d, _ in out._buffer) @@ -1646,9 +1723,7 @@ async def test_http_payload_brotli(self, stream) -> None: class TestDeflateBuffer: async def test_feed_data(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1659,9 +1734,7 @@ async def test_feed_data(self, stream) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) async def test_feed_data_err(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") exc = ValueError() @@ -1674,9 +1747,7 @@ async def test_feed_data_err(self, stream) -> None: dbuf.feed_data(b"xsomedata", 9) async def test_feed_eof(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) 
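# Hedged aside (not part of the diff): the two deflate fixtures used in the
# payload-parser tests above (copied here) differ only in the zlib header.
# Plain zlib.decompress() handles the header-carrying form, while the
# header-less ("raw deflate") form needs a negative wbits value:
import zlib

WITH_HDRS = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b"
NO_HDRS = b"KI,I\x04\x00"

assert zlib.decompress(WITH_HDRS) == b"data"
dobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
assert dobj.decompress(NO_HDRS) + dobj.flush() == b"data"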
dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1687,9 +1758,7 @@ async def test_feed_eof(self, stream) -> None: assert buf._eof async def test_feed_eof_err_deflate(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1700,9 +1769,7 @@ async def test_feed_eof_err_deflate(self, stream) -> None: dbuf.feed_eof() async def test_feed_eof_no_err_gzip(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "gzip") dbuf.decompressor = mock.Mock() @@ -1713,9 +1780,7 @@ async def test_feed_eof_no_err_gzip(self, stream) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) async def test_feed_eof_no_err_brotli(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "br") dbuf.decompressor = mock.Mock() @@ -1726,9 +1791,7 @@ async def test_feed_eof_no_err_brotli(self, stream) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) async def test_empty_body(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.feed_eof() diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 5649f32..82ad07d 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -5,7 +5,7 @@ import pytest from multidict import CIMultiDict -from aiohttp import http +from aiohttp import ClientConnectionResetError, http from aiohttp.test_utils import make_mocked_coro @@ -232,12 +232,12 @@ async def test_write_to_closing_transport(protocol, transport, loop) -> None: await msg.write(b"Before closing") transport.is_closing.return_value = True - with pytest.raises(ConnectionResetError): + with pytest.raises(ClientConnectionResetError): await msg.write(b"After closing") async def test_write_to_closed_transport(protocol, transport, loop) -> None: - """Test that writing to a closed transport raises ConnectionResetError. + """Test that writing to a closed transport raises ClientConnectionResetError. The StreamWriter checks to see if protocol.transport is None before writing to the transport. If it is None, it raises ConnectionResetError. 
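# Hedged sketch (not part of the diff): client code handling the more specific
# exception these writer tests now expect. aiohttp.ClientConnectionResetError
# is imported exactly as the import change above shows; safe_write() and its
# msg argument are illustrative placeholders.
import aiohttp

async def safe_write(msg, data: bytes) -> bool:
    try:
        await msg.write(data)
    except aiohttp.ClientConnectionResetError:
        return False  # transport was closing or already gone
    return True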
@@ -247,7 +247,9 @@ async def test_write_to_closed_transport(protocol, transport, loop) -> None: await msg.write(b"Before transport close") protocol.transport = None - with pytest.raises(ConnectionResetError, match="Cannot write to closing transport"): + with pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ): await msg.write(b"After transport closed") diff --git a/tests/test_locks.py b/tests/test_locks.py deleted file mode 100644 index 5f434ea..0000000 --- a/tests/test_locks.py +++ /dev/null @@ -1,54 +0,0 @@ -# Tests of custom aiohttp locks implementations -import asyncio - -import pytest - -from aiohttp.locks import EventResultOrError - - -class TestEventResultOrError: - async def test_set_exception(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - try: - await ev.wait() - except Exception as e: - return e - return 1 - - t = loop.create_task(c()) - await asyncio.sleep(0) - e = Exception() - ev.set(exc=e) - assert (await t) == e - - async def test_set(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - await ev.wait() - return 1 - - t = loop.create_task(c()) - await asyncio.sleep(0) - ev.set() - assert (await t) == 1 - - async def test_cancel_waiters(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - await ev.wait() - - t1 = loop.create_task(c()) - t2 = loop.create_task(c()) - await asyncio.sleep(0) - ev.cancel() - ev.set() - - with pytest.raises(asyncio.CancelledError): - await t1 - - with pytest.raises(asyncio.CancelledError): - await t2 diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 436b709..bbbc1c6 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -2,6 +2,7 @@ import io import json import pathlib +import sys import zlib from unittest import mock @@ -377,6 +378,17 @@ async def test_read_with_content_transfer_encoding_quoted_printable(self) -> Non ) assert result == expected + async def test_decode_with_content_transfer_encoding_base64(self) -> None: + with Stream(b"VG\r\r\nltZSB0byBSZ\r\nWxheCE=\r\n--:--") as stream: + obj = aiohttp.BodyPartReader( + BOUNDARY, {CONTENT_TRANSFER_ENCODING: "base64"}, stream + ) + result = b"" + while not obj.at_eof(): + chunk = await obj.read_chunk(size=6) + result += obj.decode(chunk) + assert b"Time to Relax!" 
== result + @pytest.mark.parametrize("encoding", ("binary", "8bit", "7bit")) async def test_read_with_content_transfer_encoding_binary( self, encoding: str @@ -754,6 +766,66 @@ async def test_invalid_boundary(self) -> None: with pytest.raises(ValueError): await reader.next() + @pytest.mark.skipif(sys.version_info < (3, 10), reason="Needs anext()") + async def test_read_boundary_across_chunks(self) -> None: + class SplitBoundaryStream: + def __init__(self) -> None: + self.content = [ + b"--foobar\r\n\r\n", + b"Hello,\r\n-", + b"-fo", + b"ob", + b"ar\r\n", + b"\r\nwor", + b"ld!", + b"\r\n--f", + b"oobar--", + ] + + async def read(self, size=None) -> bytes: + chunk = self.content.pop(0) + assert len(chunk) <= size + return chunk + + def at_eof(self) -> bool: + return not self.content + + async def readline(self) -> bytes: + line = b"" + while self.content and b"\n" not in line: + line += self.content.pop(0) + line, *extra = line.split(b"\n", maxsplit=1) + if extra and extra[0]: + self.content.insert(0, extra[0]) + return line + b"\n" + + def unread_data(self, data: bytes) -> None: + if self.content: + self.content[0] = data + self.content[0] + else: + self.content.append(data) + + stream = SplitBoundaryStream() + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary="foobar"'}, stream + ) + part = await anext(reader) + result = await part.read_chunk(10) + assert result == b"Hello," + result = await part.read_chunk(10) + assert result == b"" + assert part.at_eof() + + part = await anext(reader) + result = await part.read_chunk(10) + assert result == b"world!" + result = await part.read_chunk(10) + assert result == b"" + assert part.at_eof() + + with pytest.raises(StopAsyncIteration): + await anext(reader) + async def test_release(self) -> None: with Stream( newline.join( diff --git a/tests/test_payload.py b/tests/test_payload.py index c8681cb..0e2db91 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -17,6 +17,9 @@ def registry(): class Payload(payload.Payload): + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + assert False + async def write(self, writer): pass diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 6366a13..4fa5e93 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -4,6 +4,7 @@ import ssl import sys import unittest +from typing import Any from unittest import mock import pytest @@ -11,6 +12,7 @@ import aiohttp from aiohttp.client_reqrep import ClientRequest, ClientResponse +from aiohttp.connector import _SSL_CONTEXT_VERIFIED from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -40,7 +42,12 @@ def tearDown(self): gc.collect() @mock.patch("aiohttp.connector.ClientRequest") - def test_connect(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_connect(self, start_connection: Any, ClientRequestMock: Any) -> None: req = ClientRequest( "GET", URL("http://www.python.org"), @@ -54,7 +61,18 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro([mock.MagicMock()]) + connector._resolve_host = make_mocked_coro( + [ + { + "hostname": "hostname", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": 0, + } + ] + ) proto = mock.Mock( **{ @@ -81,7 +99,12 @@ async def make_conn(): conn.close() 
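# Hedged aside (not part of the diff): the dict used for the mocked
# _resolve_host return value above mirrors one resolved-address entry; the
# fake_resolved() helper below is illustrative only.
import socket
from typing import Any, Dict, List

def fake_resolved(host: str = "127.0.0.1", port: int = 80) -> List[Dict[str, Any]]:
    return [
        {
            "hostname": "hostname",
            "host": host,
            "port": port,
            "family": socket.AF_INET,
            "proto": 0,
            "flags": 0,
        }
    ]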
@mock.patch("aiohttp.connector.ClientRequest") - def test_proxy_headers(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_headers(self, start_connection: Any, ClientRequestMock: Any) -> None: req = ClientRequest( "GET", URL("http://www.python.org"), @@ -96,7 +119,18 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro([mock.MagicMock()]) + connector._resolve_host = make_mocked_coro( + [ + { + "hostname": "hostname", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": 0, + } + ] + ) proto = mock.Mock( **{ @@ -122,7 +156,12 @@ async def make_conn(): conn.close() - def test_proxy_auth(self) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_auth(self, start_connection: Any) -> None: with self.assertRaises(ValueError) as ctx: ClientRequest( "GET", @@ -136,11 +175,16 @@ def test_proxy_auth(self) -> None: "proxy_auth must be None or BasicAuth() tuple", ) - def test_proxy_dns_error(self) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_dns_error(self, start_connection: Any) -> None: async def make_conn(): return aiohttp.TCPConnector() - connector = self.loop.run_until_complete(make_conn()) + connector: aiohttp.TCPConnector = self.loop.run_until_complete(make_conn()) connector._resolve_host = make_mocked_coro( raise_exception=OSError("dont take it serious") ) @@ -159,7 +203,12 @@ async def make_conn(): self.assertEqual(req.url.path, "/") self.assertEqual(dict(req.headers), expected_headers) - def test_proxy_connection_error(self) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_connection_error(self, start_connection: Any) -> None: async def make_conn(): return aiohttp.TCPConnector() @@ -192,7 +241,14 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_proxy_server_hostname_default(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_server_hostname_default( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -252,7 +308,14 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_proxy_server_hostname_override(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_server_hostname_override( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), @@ -316,7 +379,12 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect(self, start_connection: Any, ClientRequestMock: Any) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) 
@@ -376,7 +444,14 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_certificate_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_certificate_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -430,7 +505,14 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_ssl_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_ssl_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -486,7 +568,14 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_http_proxy_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_http_proxy_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -545,7 +634,14 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_resp_start_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_resp_start_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -598,7 +694,12 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_request_port(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_request_port(self, start_connection: Any, ClientRequestMock: Any) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -656,7 +757,14 @@ def test_proxy_auth_property_default(self) -> None: self.assertIsNone(req.proxy_auth) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_pass_ssl_context(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_pass_ssl_context( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -710,7 +818,7 @@ async def make_conn(): self.loop.start_tls.assert_called_with( mock.ANY, mock.ANY, - connector._make_ssl_context(True), + _SSL_CONTEXT_VERIFIED, server_hostname="www.python.org", ssl_handshake_timeout=mock.ANY, ) @@ -724,7 +832,12 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_auth(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_auth(self, start_connection: Any, ClientRequestMock: Any) -> None: proxy_req = 
ClientRequest( "GET", URL("http://proxy.example.com"), diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 099922a..4b11d11 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -12,22 +12,10 @@ from yarl import URL import aiohttp -from aiohttp import web +from aiohttp import client_reqrep, web from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS -pytestmark = [ - pytest.mark.filterwarnings( - "ignore:unclosed = (3, 11) @@ -107,6 +95,7 @@ async def handler(*args, **kwargs): reason="asyncio on this python does not support TLS in TLS", ) @pytest.mark.parametrize("web_server_endpoint_type", ("http", "https")) +@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) @pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") # Filter out the warning from # https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 @@ -116,21 +105,26 @@ async def test_secure_https_proxy_absolute_path( secure_proxy_url: URL, web_server_endpoint_url: str, web_server_endpoint_payload: str, + yarl_supports_host_subcomponent: bool, ) -> None: """Ensure HTTP(S) sites are accessible through a secure proxy.""" conn = aiohttp.TCPConnector() sess = aiohttp.ClientSession(connector=conn) - response = await sess.get( - web_server_endpoint_url, - proxy=secure_proxy_url, - ssl=client_ssl_ctx, # used for both proxy and endpoint connections - ) - - assert response.status == 200 - assert await response.text() == web_server_endpoint_payload + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, + "_YARL_SUPPORTS_HOST_SUBCOMPONENT", + yarl_supports_host_subcomponent, + ): + async with sess.get( + web_server_endpoint_url, + proxy=secure_proxy_url, + ssl=client_ssl_ctx, # used for both proxy and endpoint connections + ) as response: + assert response.status == 200 + assert await response.text() == web_server_endpoint_payload - response.close() await sess.close() await conn.close() @@ -192,13 +186,17 @@ async def test_https_proxy_unsupported_tls_in_tls( r"$" ) - with pytest.warns(RuntimeWarning, match=expected_warning_text,), pytest.raises( + with pytest.warns( + RuntimeWarning, + match=expected_warning_text, + ), pytest.raises( ClientConnectionError, match=expected_exception_reason, ) as conn_err: - await sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx) + async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx): + pass - assert type(conn_err.value.__cause__) == TypeError + assert isinstance(conn_err.value.__cause__, TypeError) assert match_regex(f"^{type_err!s}$", str(conn_err.value.__cause__)) await sess.close() @@ -256,13 +254,11 @@ async def proxy_server(): def get_request(loop): async def _request(method="GET", *, url, trust_env=False, **kwargs): connector = aiohttp.TCPConnector(ssl=False, loop=loop) - client = aiohttp.ClientSession(connector=connector, trust_env=trust_env) - try: - resp = await client.request(method, url, **kwargs) - await resp.release() - return resp - finally: - await client.close() + async with aiohttp.ClientSession( + connector=connector, trust_env=trust_env + ) as client: + async with client.request(method, url, **kwargs) as resp: + return resp return _request @@ -402,11 +398,8 @@ async def test_proxy_http_acquired_cleanup_force(proxy_test_server, loop) -> Non assert 0 == len(conn._acquired) async def request(): - resp = await sess.get(url, proxy=proxy.url) - 
- assert 1 == len(conn._acquired) - - await resp.release() + async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) await request() @@ -430,13 +423,11 @@ async def request(pid): # process requests only one by one nonlocal current_pid - resp = await sess.get(url, proxy=proxy.url) - - current_pid = pid - await asyncio.sleep(0.2, loop=loop) - assert current_pid == pid + async with sess.get(url, proxy=proxy.url) as resp: + current_pid = pid + await asyncio.sleep(0.2, loop=loop) + assert current_pid == pid - await resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] @@ -487,9 +478,8 @@ async def xtest_proxy_https_send_body(proxy_test_server, loop): proxy.return_value = {"status": 200, "body": b"1" * (2**20)} url = "https://www.google.com.ua/search?q=aiohttp proxy" - resp = await sess.get(url, proxy=proxy.url) - body = await resp.read() - await resp.release() + async with sess.get(url, proxy=proxy.url) as resp: + body = await resp.read() await sess.close() assert body == b"1" * (2**20) @@ -583,11 +573,8 @@ async def xtest_proxy_https_acquired_cleanup(proxy_test_server, loop): assert 0 == len(conn._acquired) async def request(): - resp = await sess.get(url, proxy=proxy.url) - - assert 1 == len(conn._acquired) - - await resp.release() + async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) await request() @@ -607,11 +594,8 @@ async def xtest_proxy_https_acquired_cleanup_force(proxy_test_server, loop): assert 0 == len(conn._acquired) async def request(): - resp = await sess.get(url, proxy=proxy.url) - - assert 1 == len(conn._acquired) - - await resp.release() + async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) await request() @@ -635,13 +619,11 @@ async def request(pid): # process requests only one by one nonlocal current_pid - resp = await sess.get(url, proxy=proxy.url) - - current_pid = pid - await asyncio.sleep(0.2, loop=loop) - assert current_pid == pid + async with sess.get(url, proxy=proxy.url) as resp: + current_pid = pid + await asyncio.sleep(0.2, loop=loop) + assert current_pid == pid - await resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] @@ -847,8 +829,9 @@ async def test_proxy_auth() -> None: with pytest.raises( ValueError, match=r"proxy_auth must be None or BasicAuth\(\) tuple" ): - await session.get( + async with session.get( "http://python.org", proxy="http://proxy.example.com", proxy_auth=("user", "pass"), - ) + ): + pass diff --git a/tests/test_pytest_plugin.py b/tests/test_pytest_plugin.py index b25a553..ad22254 100644 --- a/tests/test_pytest_plugin.py +++ b/tests/test_pytest_plugin.py @@ -19,6 +19,8 @@ def test_aiohttp_plugin(testdir) -> None: from aiohttp import web +value = web.AppKey('value', str) + async def hello(request): return web.Response(body=b'Hello, world') @@ -75,10 +77,10 @@ async def test_noop() -> None: async def previous(request): if request.method == 'POST': with pytest.deprecated_call(): # FIXME: this isn't actually called - request.app['value'] = (await request.post())['value'] + request.app[value] = (await request.post())['value'] return web.Response(body=b'thanks for the data') else: - v = request.app.get('value', 'unknown') + v = request.app.get(value, 'unknown') return web.Response(body='value: {}'.format(v).encode()) @@ -98,7 +100,7 @@ async def test_set_value(cli) -> None: assert resp.status == 200 text = await resp.text() assert text == 'thanks for the data' - assert cli.server.app['value'] == 'foo' + assert 
cli.server.app[value] == 'foo' async def test_get_value(cli) -> None: @@ -107,7 +109,7 @@ async def test_get_value(cli) -> None: text = await resp.text() assert text == 'value: unknown' with pytest.warns(DeprecationWarning): - cli.server.app['value'] = 'bar' + cli.server.app[value] = 'bar' resp = await cli.get('/') assert resp.status == 200 text = await resp.text() @@ -119,7 +121,6 @@ def test_noncoro() -> None: async def test_failed_to_create_client(aiohttp_client) -> None: - def make_app(loop): raise RuntimeError() @@ -142,7 +143,6 @@ async def test_custom_port_test_server(aiohttp_server, aiohttp_unused_port): port = aiohttp_unused_port() server = await aiohttp_server(app, port=port) assert server.port == port - """ ) testdir.makeconftest(CONFTEST) diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 1b389f3..f8fba50 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -1,25 +1,57 @@ import asyncio import ipaddress import socket -from typing import Any, List -from unittest.mock import Mock, patch +from ipaddress import ip_address +from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union +from unittest.mock import Mock, create_autospec, patch import pytest -from aiohttp.resolver import AsyncResolver, DefaultResolver, ThreadedResolver +from aiohttp.resolver import ( + _NAME_SOCKET_FLAGS, + _SUPPORTS_SCOPE_ID, + AsyncResolver, + DefaultResolver, + ThreadedResolver, +) try: import aiodns - gethostbyname = hasattr(aiodns.DNSResolver, "gethostbyname") + getaddrinfo: Any = hasattr(aiodns.DNSResolver, "getaddrinfo") except ImportError: - aiodns = None - gethostbyname = False + aiodns = None # type: ignore[assignment] + getaddrinfo = False -class FakeResult: - def __init__(self, addresses): - self.addresses = addresses +class FakeAIODNSAddrInfoNode(NamedTuple): + + family: int + addr: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] + + +class FakeAIODNSAddrInfoIPv4Result: + def __init__(self, hosts: Collection[str]) -> None: + self.nodes = [ + FakeAIODNSAddrInfoNode(socket.AF_INET, (h.encode(), 0)) for h in hosts + ] + + +class FakeAIODNSAddrInfoIPv6Result: + def __init__(self, hosts: Collection[str]) -> None: + self.nodes = [ + FakeAIODNSAddrInfoNode( + socket.AF_INET6, + (h.encode(), 0, 0, 3 if ip_address(h).is_link_local else 0), + ) + for h in hosts + ] + + +class FakeAIODNSNameInfoIPv6Result: + def __init__(self, host: str) -> None: + self.node = host + self.service = None class FakeQueryResult: @@ -27,16 +59,30 @@ def __init__(self, host): self.host = host -async def fake_result(addresses): - return FakeResult(addresses=tuple(addresses)) +async def fake_aiodns_getaddrinfo_ipv4_result( + hosts: Collection[str], +) -> FakeAIODNSAddrInfoIPv4Result: + return FakeAIODNSAddrInfoIPv4Result(hosts=hosts) + + +async def fake_aiodns_getaddrinfo_ipv6_result( + hosts: Collection[str], +) -> FakeAIODNSAddrInfoIPv6Result: + return FakeAIODNSAddrInfoIPv6Result(hosts=hosts) + + +async def fake_aiodns_getnameinfo_ipv6_result( + host: str, +) -> FakeAIODNSNameInfoIPv6Result: + return FakeAIODNSNameInfoIPv6Result(host) async def fake_query_result(result): return [FakeQueryResult(host=h) for h in result] -def fake_addrinfo(hosts): - async def fake(*args, **kwargs): +def fake_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> List[Any]: if not hosts: raise socket.gaierror @@ -45,33 +91,81 @@ async def fake(*args, **kwargs): return fake -@pytest.mark.skipif(not gethostbyname, 
reason="aiodns 1.1 required") -async def test_async_resolver_positive_lookup(loop) -> None: +def fake_ipv6_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> List[Any]: + if not hosts: + raise socket.gaierror + + return [ + ( + socket.AF_INET6, + None, + socket.SOCK_STREAM, + None, + (h, 0, 0, 3 if ip_address(h).is_link_local else 0), + ) + for h in hosts + ] + + return fake + + +def fake_ipv6_nameinfo(host: str) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> Tuple[str, int]: + return host, 0 + + return fake + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result(["127.0.0.1"]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result( + ["127.0.0.1"] + ) + resolver = AsyncResolver() real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().gethostbyname.assert_called_with("www.python.org", socket.AF_INET) - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_positive_lookup(loop) -> None: + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.skipif( + not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" +) +async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.return_value = fake_query_result(["127.0.0.1"]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result( + ["fe80::1"] + ) + mock().getnameinfo.return_value = fake_aiodns_getnameinfo_ipv6_result( + "fe80::1%eth0" + ) + resolver = AsyncResolver() real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().query.assert_called_with("www.python.org", "A") - - -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_multiple_replies(loop) -> None: + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) + mock().getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_multiple_replies(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] - mock().gethostbyname.return_value = fake_result(ips) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result(ips) + resolver = AsyncResolver() real = await resolver.resolve("www.google.com") ips = [ipaddress.ip_address(x["host"]) for x in real] assert len(ips) > 3, "Expecting multiple addresses" @@ -88,40 +182,20 @@ async def test_async_resolver_query_multiple_replies(loop) -> None: ips = [ipaddress.ip_address(x["host"]) for x in real] -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_negative_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, 
reason="aiodns >=3.2.0 required") +async def test_async_resolver_negative_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.side_effect = aiodns.error.DNSError() - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.side_effect = aiodns.error.DNSError() + resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_negative_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_no_hosts_in_getaddrinfo(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.side_effect = aiodns.error.DNSError() - resolver = AsyncResolver(loop=loop) - with pytest.raises(OSError): - await resolver.resolve("doesnotexist.bla") - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_no_hosts_in_query(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.return_value = fake_query_result([]) - resolver = AsyncResolver(loop=loop) - with pytest.raises(OSError): - await resolver.resolve("doesnotexist.bla") - - -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_no_hosts_in_gethostbyname(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result([]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result([]) + resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") @@ -135,6 +209,39 @@ async def test_threaded_resolver_positive_lookup() -> None: ipaddress.ip_address(real[0]["host"]) +@pytest.mark.skipif( + not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" +) +async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: + loop = Mock() + loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) + loop.getnameinfo = fake_ipv6_nameinfo("fe80::1%eth0") + + # Mock the fake function that was returned by helper functions + loop.getaddrinfo = create_autospec(loop.getaddrinfo) + loop.getnameinfo = create_autospec(loop.getnameinfo) + + # Set the correct return values for mock functions + loop.getaddrinfo.return_value = await fake_ipv6_addrinfo(["fe80::1"])() + loop.getnameinfo.return_value = await fake_ipv6_nameinfo("fe80::1%eth0")() + + resolver = ThreadedResolver() + resolver._loop = loop + real = await resolver.resolve("www.python.org") + assert real[0]["hostname"] == "www.python.org" + ipaddress.ip_address(real[0]["host"]) + + loop.getaddrinfo.assert_called_with( + "www.python.org", + 0, + type=socket.SOCK_STREAM, + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + ) + + loop.getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) + + async def test_threaded_resolver_multiple_replies() -> None: loop = Mock() ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] @@ -154,6 +261,16 @@ async def test_threaded_negative_lookup() -> None: await resolver.resolve("doesnotexist.bla") +async def test_threaded_negative_ipv6_lookup() -> None: + loop = Mock() + ips: List[Any] = [] + loop.getaddrinfo = fake_ipv6_addrinfo(ips) + resolver = ThreadedResolver() + resolver._loop = loop + with pytest.raises(socket.gaierror): + await resolver.resolve("doesnotexist.bla") + + async def 
test_threaded_negative_lookup_with_unknown_result() -> None: loop = Mock() @@ -195,21 +312,20 @@ async def test_default_loop_for_threaded_resolver(loop) -> None: assert resolver._loop is loop -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_default_loop_for_async_resolver(loop) -> None: - asyncio.set_event_loop(loop) - resolver = AsyncResolver() - assert resolver._loop is loop - - -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_ipv6_positive_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_ipv6_positive_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result(["::1"]) - resolver = AsyncResolver(loop=loop) - real = await resolver.resolve("www.python.org", family=socket.AF_INET6) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result(["::1"]) + resolver = AsyncResolver() + real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().gethostbyname.assert_called_with("www.python.org", socket.AF_INET6) + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) @pytest.mark.skipif(aiodns is None, reason="aiodns required") @@ -223,15 +339,75 @@ async def test_async_resolver_query_ipv6_positive_lookup(loop) -> None: mock().query.assert_called_with("www.python.org", "AAAA") -async def test_async_resolver_aiodns_not_present(loop, monkeypatch) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_query_fallback_error_messages_passed( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns with query fallback.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + del mock().gethostbyname + mock().query.side_effect = aiodns.error.DNSError(1, "Test error message") + resolver = AsyncResolver() + with pytest.raises(OSError, match="Test error message") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "Test error message" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_query_fallback_error_messages_passed_no_hosts( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns with query fallback.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + del mock().gethostbyname + mock().query.return_value = fake_query_result([]) + resolver = AsyncResolver() + with pytest.raises(OSError, match="DNS lookup failed") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "DNS lookup failed" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_error_messages_passed( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + mock().getaddrinfo.side_effect = aiodns.error.DNSError(1, "Test error message") + resolver = AsyncResolver() + with pytest.raises(OSError, match="Test error message") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "Test error message" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 
required") +async def test_async_resolver_error_messages_passed_no_hosts( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result([]) + resolver = AsyncResolver() + with pytest.raises(OSError, match="DNS lookup failed") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "DNS lookup failed" + + +async def test_async_resolver_aiodns_not_present(loop: Any, monkeypatch: Any) -> None: monkeypatch.setattr("aiohttp.resolver.aiodns", None) with pytest.raises(RuntimeError): AsyncResolver(loop=loop) -def test_default_resolver() -> None: - # if gethostbyname: - # assert DefaultResolver is AsyncResolver - # else: - # assert DefaultResolver is ThreadedResolver +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +def test_aio_dns_is_default() -> None: + assert DefaultResolver is AsyncResolver + + +@pytest.mark.skipif(getaddrinfo, reason="aiodns <3.2.0 required") +def test_threaded_resolver_is_default() -> None: assert DefaultResolver is ThreadedResolver diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 5696928..74d8c79 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,13 +9,13 @@ import subprocess import sys import time -from typing import Callable, NoReturn, Set +from typing import AsyncIterator, Callable, NoReturn, Set from unittest import mock from uuid import uuid4 import pytest -from aiohttp import ClientConnectorError, ClientSession, WSCloseCode, web +from aiohttp import ClientConnectorError, ClientSession, ClientTimeout, WSCloseCode, web from aiohttp.test_utils import make_mocked_coro from aiohttp.web_runner import BaseRunner @@ -906,6 +906,23 @@ async def init(): assert count == 3 +def test_run_app_raises_exception(patched_loop: asyncio.AbstractEventLoop) -> None: + async def context(app: web.Application) -> AsyncIterator[None]: + raise RuntimeError("foo") + yield # pragma: no cover + + app = web.Application() + app.cleanup_ctx.append(context) + + with mock.patch.object( + patched_loop, "call_exception_handler", autospec=True, spec_set=True + ) as m: + with pytest.raises(RuntimeError, match="foo"): + web.run_app(app, loop=patched_loop) + + assert not m.called + + class TestShutdown: def raiser(self) -> NoReturn: raise KeyboardInterrupt @@ -915,13 +932,34 @@ async def stop(self, request: web.Request) -> web.Response: return web.Response() def run_app(self, port: int, timeout: int, task, extra_test=None) -> asyncio.Task: + num_connections = -1 + + class DictRecordClear(dict): + def clear(self): + nonlocal num_connections + # During Server.shutdown() we want to know how many connections still + # remained before it got cleared. If the handler completed successfully + # the connection should've been removed already. If not, this may + # indicate a memory leak. 
+ num_connections = len(self) + super().clear() + + class ServerWithRecordClear(web.Server): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._connections = DictRecordClear() + async def test() -> None: await asyncio.sleep(0.5) async with ClientSession() as sess: for _ in range(5): # pragma: no cover try: - async with sess.get(f"http://localhost:{port}/"): - pass + with pytest.raises(asyncio.TimeoutError): + async with sess.get( + f"http://localhost:{port}/", + timeout=ClientTimeout(total=0.1), + ): + pass except ClientConnectorError: await asyncio.sleep(0.5) else: @@ -941,6 +979,7 @@ async def run_test(app: web.Application) -> None: async def handler(request: web.Request) -> web.Response: nonlocal t t = asyncio.create_task(task()) + await t return web.Response(text="FOO") t = test_task = None @@ -949,11 +988,12 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/", handler) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=timeout) + with mock.patch("aiohttp.web_app.Server", ServerWithRecordClear): + web.run_app(app, port=port, shutdown_timeout=timeout) assert test_task.exception() is None - return t + return t, num_connections - def test_shutdown_wait_for_task( + def test_shutdown_wait_for_handler( self, aiohttp_unused_port: Callable[[], int] ) -> None: port = aiohttp_unused_port() @@ -964,13 +1004,14 @@ async def task(): await asyncio.sleep(2) finished = True - t = self.run_app(port, 3, task) + t, connection_count = self.run_app(port, 3, task) assert finished is True assert t.done() assert not t.cancelled() + assert connection_count == 0 - def test_shutdown_timeout_task( + def test_shutdown_timeout_handler( self, aiohttp_unused_port: Callable[[], int] ) -> None: port = aiohttp_unused_port() @@ -981,39 +1022,12 @@ async def task(): await asyncio.sleep(2) finished = True - t = self.run_app(port, 1, task) + t, connection_count = self.run_app(port, 1, task) assert finished is False assert t.done() assert t.cancelled() - - def test_shutdown_wait_for_spawned_task( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() - finished = False - finished_sub = False - sub_t = None - - async def sub_task(): - nonlocal finished_sub - await asyncio.sleep(1.5) - finished_sub = True - - async def task(): - nonlocal finished, sub_t - await asyncio.sleep(0.5) - sub_t = asyncio.create_task(sub_task()) - finished = True - - t = self.run_app(port, 3, task) - - assert finished is True - assert t.done() - assert not t.cancelled() - assert finished_sub is True - assert sub_t.done() - assert not sub_t.cancelled() + assert connection_count == 1 def test_shutdown_timeout_not_reached( self, aiohttp_unused_port: Callable[[], int] @@ -1027,10 +1041,11 @@ async def task(): finished = True start_time = time.time() - t = self.run_app(port, 15, task) + t, connection_count = self.run_app(port, 15, task) assert finished is True assert t.done() + assert connection_count == 0 # Verify run_app has not waited for timeout. 
         assert time.time() - start_time < 10
 
@@ -1055,10 +1070,11 @@ async def test(sess: ClientSession) -> None:
                 pass
             assert finished is False
 
-        t = self.run_app(port, 10, task, test)
+        t, connection_count = self.run_app(port, 10, task, test)
 
         assert finished is True
         assert t.done()
+        assert connection_count == 0
 
     def test_shutdown_pending_handler_responds(
         self, aiohttp_unused_port: Callable[[], int]
     ) -> None:
@@ -1191,3 +1207,54 @@ async def run_test(app: web.Application) -> None:
         assert time.time() - start < 5
         assert client_finished
         assert server_finished
+
+    def test_shutdown_handler_cancellation_suppressed(
+        self, aiohttp_unused_port: Callable[[], int]
+    ) -> None:
+        port = aiohttp_unused_port()
+        actions = []
+
+        async def test() -> None:
+            async def test_resp(sess):
+                t = ClientTimeout(total=0.4)
+                with pytest.raises(asyncio.TimeoutError):
+                    async with sess.get(f"http://localhost:{port}/", timeout=t) as resp:
+                        assert await resp.text() == "FOO"
+                actions.append("CANCELLED")
+
+            async with ClientSession() as sess:
+                t = asyncio.create_task(test_resp(sess))
+                await asyncio.sleep(0.5)
+                # Handler is in-progress while we trigger server shutdown.
+                actions.append("PRESTOP")
+                async with sess.get(f"http://localhost:{port}/stop"):
+                    pass
+
+                actions.append("STOPPING")
+                # Handler should still complete and produce a response.
+                await t
+
+        async def run_test(app: web.Application) -> None:
+            nonlocal t
+            t = asyncio.create_task(test())
+            yield
+            await t
+
+        async def handler(request: web.Request) -> web.Response:
+            try:
+                await asyncio.sleep(5)
+            except asyncio.CancelledError:
+                actions.append("SUPPRESSED")
+                await asyncio.sleep(2)
+            actions.append("DONE")
+            return web.Response(text="FOO")
+
+        t = None
+        app = web.Application()
+        app.cleanup_ctx.append(run_test)
+        app.router.add_get("/", handler)
+        app.router.add_get("/stop", self.stop)
+
+        web.run_app(app, port=port, shutdown_timeout=2, handler_cancellation=True)
+        assert t.exception() is None
+        assert actions == ["CANCELLED", "SUPPRESSED", "PRESTOP", "STOPPING", "DONE"]
diff --git a/tests/test_streams.py b/tests/test_streams.py
index 115371c..fcf13a9 100644
--- a/tests/test_streams.py
+++ b/tests/test_streams.py
@@ -1126,6 +1126,7 @@ async def test_unread_empty(self) -> None:
 async def test_empty_stream_reader() -> None:
     s = streams.EmptyStreamReader()
     assert str(s) is not None
+    assert repr(s) == "<EmptyStreamReader>"
     assert s.set_exception(ValueError()) is None
     assert s.exception() is None
     assert s.feed_eof() is None
diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py
index 1ac742f..a9c5179 100644
--- a/tests/test_test_utils.py
+++ b/tests/test_test_utils.py
@@ -239,6 +239,11 @@ def test_make_mocked_request_content() -> None:
     assert req.content is payload
 
 
+async def test_make_mocked_request_empty_payload() -> None:
+    req = make_mocked_request("GET", "/")
+    assert await req.read() == b""
+
+
 def test_make_mocked_request_transport() -> None:
     transport = mock.Mock()
     req = make_mocked_request("GET", "/", transport=transport)
@@ -259,7 +264,7 @@ async def test_test_client_props(loop) -> None:
 
 async def test_test_client_raw_server_props(loop) -> None:
     async def hello(request):
-        return web.Response(body=_hello_world_bytes)
+        return web.Response()  # pragma: no cover
 
     client = _TestClient(_RawTestServer(hello, host="127.0.0.1", loop=loop), loop=loop)
     assert client.host == "127.0.0.1"
@@ -366,3 +371,15 @@ def factory(*args, **kwargs) -> socket:
         pass
 
     assert factory_called
+
+
+@pytest.mark.parametrize(
+    ("hostname", "expected_host"),
+    [("127.0.0.1", "127.0.0.1"), 
("localhost", "127.0.0.1"), ("::1", "::1")], +) +async def test_test_server_hostnames(hostname, expected_host, loop) -> None: + app = _create_example_app() + server = _TestServer(app, host=hostname, loop=loop) + async with server: + pass + assert server.host == expected_host diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 4f3abb8..de68155 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,7 +1,7 @@ import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized -from urllib.parse import unquote +from urllib.parse import quote, unquote import pytest from re_assert import Matches @@ -339,6 +339,21 @@ def test_route_dynamic(router) -> None: assert route is route2 +def test_add_static_path_checks(router: any, tmp_path: pathlib.Path) -> None: + """Test that static paths must exist and be directories.""" + with pytest.raises(ValueError, match="does not exist"): + router.add_static("/", tmp_path / "does-not-exist") + with pytest.raises(ValueError, match="is not a directory"): + router.add_static("/", __file__) + + +def test_add_static_path_resolution(router: any) -> None: + """Test that static paths are expanded and absolute.""" + res = router.add_static("/", "~/..") + directory = str(res.get_info()["directory"]) + assert directory == str(pathlib.Path.home().parent) + + def test_add_static(router) -> None: resource = router.add_static( "/st", pathlib.Path(aiohttp.__file__).parent, name="static" @@ -515,19 +530,24 @@ def test_static_remove_trailing_slash(router) -> None: assert "/prefix" == route._prefix -async def test_add_route_with_re(router) -> None: +@pytest.mark.parametrize( + "pattern,url,expected", + ( + (r"{to:\d+}", r"1234", {"to": "1234"}), + ("{name}.html", "test.html", {"name": "test"}), + (r"{fn:\w+ \d+}", "abc 123", {"fn": "abc 123"}), + (r"{fn:\w+\s\d+}", "abc 123", {"fn": "abc 123"}), + ), +) +async def test_add_route_with_re( + router: web.UrlDispatcher, pattern: str, url: str, expected +) -> None: handler = make_handler() - router.add_route("GET", r"/handler/{to:\d+}", handler) - - req = make_mocked_request("GET", "/handler/1234") + router.add_route("GET", f"/handler/{pattern}", handler) + req = make_mocked_request("GET", f"/handler/{url}") info = await router.resolve(req) assert info is not None - assert {"to": "1234"} == info - - router.add_route("GET", r"/handler/{name}.html", handler) - req = make_mocked_request("GET", "/handler/test.html") - info = await router.resolve(req) - assert {"name": "test"} == info + assert info == expected async def test_add_route_with_re_and_slashes(router) -> None: @@ -722,6 +742,17 @@ async def test_dynamic_match_unquoted_path(router) -> None: assert match_info == {"path": "path", "subpath": unquote(resource_id)} +async def test_dynamic_match_double_quoted_path(router: web.UrlDispatcher) -> None: + """Verify that double-quoted path is unquoted only once.""" + handler = make_handler() + router.add_route("GET", "/{path}/{subpath}", handler) + resource_id = quote("my/path|with!some%strange$characters", safe="") + double_quoted_resource_id = quote(resource_id, safe="") + req = make_mocked_request("GET", f"/path/{double_quoted_resource_id}") + match_info = await router.resolve(req) + assert match_info == {"path": "path", "subpath": resource_id} + + def test_add_route_not_started_with_slash(router) -> None: with pytest.raises(ValueError): handler = make_handler() @@ -1258,10 +1289,17 @@ async def test_prefixed_subapp_overlap(app) -> None: 
subapp2.router.add_get("/b", handler2) app.add_subapp("/ss", subapp2) + subapp3 = web.Application() + handler3 = make_handler() + subapp3.router.add_get("/c", handler3) + app.add_subapp("/s/s", subapp3) + match_info = await app.router.resolve(make_mocked_request("GET", "/s/a")) assert match_info.route.handler is handler1 match_info = await app.router.resolve(make_mocked_request("GET", "/ss/b")) assert match_info.route.handler is handler2 + match_info = await app.router.resolve(make_mocked_request("GET", "/s/s/c")) + assert match_info.route.handler is handler3 async def test_prefixed_subapp_empty_route(app) -> None: diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 3688cf2..6a86a34 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -1,6 +1,6 @@ import asyncio import gc -from typing import AsyncIterator, Callable, Iterator, NoReturn +from typing import AsyncIterator, Callable, Iterator, NoReturn, Type from unittest import mock import pytest @@ -331,7 +331,7 @@ def test_app_run_middlewares() -> None: @web.middleware async def middleware(request: web.Request, handler: Handler) -> web.StreamResponse: - return await handler(request) + return await handler(request) # pragma: no cover root = web.Application(middlewares=[middleware]) sub = web.Application() @@ -476,7 +476,10 @@ async def fail_ctx(app: web.Application) -> AsyncIterator[NoReturn]: assert ctx_state == "CLEAN" -async def test_cleanup_ctx_exception_on_cleanup_multiple() -> None: +@pytest.mark.parametrize("exc_cls", (Exception, asyncio.CancelledError)) +async def test_cleanup_ctx_exception_on_cleanup_multiple( + exc_cls: Type[BaseException], +) -> None: app = web.Application() out = [] @@ -488,7 +491,7 @@ async def inner(app: web.Application) -> AsyncIterator[None]: yield None out.append("post_" + str(num)) if fail: - raise Exception("fail_" + str(num)) + raise exc_cls("fail_" + str(num)) return inner diff --git a/tests/test_web_exceptions.py b/tests/test_web_exceptions.py index 69deb27..3358a94 100644 --- a/tests/test_web_exceptions.py +++ b/tests/test_web_exceptions.py @@ -270,3 +270,8 @@ def test_unicode_text_body_unauthorized() -> None: ): resp = web.HTTPUnauthorized(body="text") assert resp.status == 401 + + +def test_multiline_reason() -> None: + with pytest.raises(ValueError, match=r"Reason cannot contain \\n"): + web.HTTPOk(reason="Bad\r\nInjected-header: foo") diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index ee61537..eadb43b 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -3,8 +3,9 @@ import json import pathlib import socket +import sys import zlib -from typing import Any, Optional +from typing import Any, NoReturn, Optional from unittest import mock import pytest @@ -22,8 +23,10 @@ web, ) from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.test_utils import make_mocked_coro from aiohttp.typedefs import Handler +from aiohttp.web_protocol import RequestHandler try: import brotlicffi as brotli @@ -101,12 +104,8 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") - - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + async with client.get("/") as resp: + assert resp.status == 500 async def test_handler_returns_none(aiohttp_server, aiohttp_client) -> None: @@ -121,13 +120,22 
@@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") + async with client.get("/") as resp: + assert resp.status == 500 - # Actual error text is placed in exc_info - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + +async def test_handler_returns_not_response_after_100expect( + aiohttp_server, aiohttp_client +) -> None: + async def handler(request: web.Request) -> NoReturn: + raise Exception("foo") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/", expect100=True) as resp: + assert resp.status == 500 async def test_head_returns_empty_body(aiohttp_client) -> None: @@ -148,6 +156,21 @@ async def handler(request): assert resp.headers["Content-Length"] == "4" +@pytest.mark.parametrize("status", (201, 204, 404)) +async def test_default_content_type_no_body(aiohttp_client: Any, status: int) -> None: + async def handler(request): + return web.Response(status=status) + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == status + assert await resp.read() == b"" + assert "Content-Type" not in resp.headers + + async def test_response_before_complete(aiohttp_client: Any) -> None: async def handler(request): return web.Response(body=b"OK") @@ -166,8 +189,42 @@ async def handler(request): await resp.release() -async def test_post_form(aiohttp_client) -> None: - async def handler(request): +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Needs Task.cancelling()") +async def test_cancel_shutdown(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + t = asyncio.create_task(request.protocol.shutdown()) + # Ensure it's started waiting + await asyncio.sleep(0) + + t.cancel() + # Cancellation should not be suppressed + with pytest.raises(asyncio.CancelledError): + await t + + # Repeat for second waiter in shutdown() + with mock.patch.object(request.protocol, "_request_in_progress", False): + with mock.patch.object(request.protocol, "_current_request", None): + t = asyncio.create_task(request.protocol.shutdown()) + await asyncio.sleep(0) + + t.cancel() + with pytest.raises(asyncio.CancelledError): + await t + + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + txt = await resp.text() + assert txt == "OK" + + +async def test_post_form(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: data = await request.post() assert {"a": "1", "b": "2", "c": ""} == data return web.Response(body=b"OK") @@ -2226,3 +2283,49 @@ async def handler(_): assert TRANSFER_ENCODING not in resp.headers await resp.read() == b"" await resp.release() + + +async def test_keepalive_race_condition(aiohttp_client: Any) -> None: + protocol = None + orig_data_received = RequestHandler.data_received + + def delay_received(self, data: bytes) -> None: + """Emulate race condition. + + The keepalive callback needs to be called between data_received() and + when start() resumes from the waiter set within data_received(). 
+ """ + data = orig_data_received(self, data) + if protocol is None: # First request creating the keepalive connection. + return data + + assert self is protocol + assert protocol._keepalive_handle is not None + # Cancel existing callback that would run at some point in future. + protocol._keepalive_handle.cancel() + protocol._keepalive_handle = None + + # Set next run time into the past and run callback manually. + protocol._next_keepalive_close_time = asyncio.get_running_loop().time() - 1 + protocol._process_keepalive() + + return data + + async def handler(request: web.Request) -> web.Response: + nonlocal protocol + protocol = request.protocol + return web.Response() + + target = "aiohttp.web_protocol.RequestHandler.data_received" + with mock.patch(target, delay_received): + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + # Open connection, so we have a keepalive connection and reference to protocol. + async with client.get("/") as resp: + assert resp.status == 200 + assert protocol is not None + # Make 2nd request which will hit the race condition. + async with client.get("/") as resp: + assert resp.status == 200 diff --git a/tests/test_web_middleware.py b/tests/test_web_middleware.py index dbe23e0..9c4462b 100644 --- a/tests/test_web_middleware.py +++ b/tests/test_web_middleware.py @@ -24,10 +24,13 @@ async def middleware(request, handler: Handler): app.middlewares.append(middleware) app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.get("/") - assert 201 == resp.status - txt = await resp.text() - assert "OK[MIDDLEWARE]" == txt + + # Call twice to verify cache works + for _ in range(2): + resp = await client.get("/") + assert 201 == resp.status + txt = await resp.text() + assert "OK[MIDDLEWARE]" == txt async def test_middleware_handles_exception(loop, aiohttp_client) -> None: @@ -44,10 +47,13 @@ async def middleware(request, handler: Handler): app.middlewares.append(middleware) app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.get("/") - assert 501 == resp.status - txt = await resp.text() - assert "Error text[MIDDLEWARE]" == txt + + # Call twice to verify cache works + for _ in range(2): + resp = await client.get("/") + assert 501 == resp.status + txt = await resp.text() + assert "Error text[MIDDLEWARE]" == txt async def test_middleware_chain(loop, aiohttp_client) -> None: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index c6398ac..9e613bb 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -169,6 +169,22 @@ def test_absolute_url() -> None: assert req.rel_url == URL.build(path="/path/to", query={"a": "1"}) +def test_clone_absolute_scheme() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.scheme == "https" + req2 = req.clone(scheme="http") + assert req2.scheme == "http" + assert req2.url.scheme == "http" + + +def test_clone_absolute_host() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.host == "example.com" + req2 = req.clone(host="foo.test") + assert req2.host == "foo.test" + assert req2.url.host == "foo.test" + + def test_content_length() -> None: req = make_mocked_request("Get", "/", CIMultiDict([("CONTENT-LENGTH", "123")])) @@ -510,6 +526,16 @@ def test_url_url() -> None: assert URL("http://example.com/path") == req.url +def test_url_non_default_port() -> None: + req = make_mocked_request("GET", "/path", 
headers={"HOST": "example.com:8123"}) + assert req.url == URL("http://example.com:8123/path") + + +def test_url_ipv6() -> None: + req = make_mocked_request("GET", "/path", headers={"HOST": "[::1]:8123"}) + assert req.url == URL("http://[::1]:8123/path") + + def test_clone() -> None: req = make_mocked_request("GET", "/path") req2 = req.clone() @@ -684,18 +710,23 @@ def test_save_state_on_clone() -> None: def test_clone_scheme() -> None: req = make_mocked_request("GET", "/") + assert req.scheme == "http" req2 = req.clone(scheme="https") assert req2.scheme == "https" + assert req2.url.scheme == "https" def test_clone_host() -> None: req = make_mocked_request("GET", "/") + assert req.host != "example.com" req2 = req.clone(host="example.com") assert req2.host == "example.com" + assert req2.url.host == "example.com" def test_clone_remote() -> None: req = make_mocked_request("GET", "/") + assert req.remote != "11.11.11.11" req2 = req.clone(remote="11.11.11.11") assert req2.remote == "11.11.11.11" diff --git a/tests/test_web_request_handler.py b/tests/test_web_request_handler.py index 06f99be..4837cab 100644 --- a/tests/test_web_request_handler.py +++ b/tests/test_web_request_handler.py @@ -22,19 +22,21 @@ async def test_connections() -> None: manager = web.Server(serve) assert manager.connections == [] - handler = object() + handler = mock.Mock(spec_set=web.RequestHandler) + handler._task_handler = None transport = object() manager.connection_made(handler, transport) # type: ignore[arg-type] assert manager.connections == [handler] - manager.connection_lost(handler, None) # type: ignore[arg-type] + manager.connection_lost(handler, None) assert manager.connections == [] async def test_shutdown_no_timeout() -> None: manager = web.Server(serve) - handler = mock.Mock() + handler = mock.Mock(spec_set=web.RequestHandler) + handler._task_handler = None handler.shutdown = make_mocked_coro(mock.Mock()) transport = mock.Mock() manager.connection_made(handler, transport) diff --git a/tests/test_web_response.py b/tests/test_web_response.py index d1b407c..25e464f 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1,8 +1,10 @@ import collections.abc import datetime import gzip +import io import json from concurrent.futures import ThreadPoolExecutor +from typing import AsyncIterator, Optional from unittest import mock import aiosignal @@ -13,7 +15,8 @@ from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs from aiohttp.helpers import ETag from aiohttp.http_writer import StreamWriter, _serialize_headers -from aiohttp.payload import BytesPayload +from aiohttp.multipart import BodyPartReader, MultipartWriter +from aiohttp.payload import BytesPayload, StringPayload from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response @@ -663,6 +666,22 @@ async def write_headers(status_line, headers): assert resp.content_length is None +async def test_rm_content_length_if_204() -> None: + """Ensure content-length is removed for 204 responses.""" + writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True) + + async def write_headers(status_line, headers): + assert hdrs.CONTENT_LENGTH not in headers + + writer.write_headers.side_effect = write_headers + req = make_request("GET", "/", writer=writer) + payload = BytesPayload(b"answer", headers={"Content-Length": "6"}) + resp = Response(body=payload, status=204) + resp.body = payload + await resp.prepare(req) + assert resp.content_length is 
None
+
+
 @pytest.mark.parametrize("status", (100, 101, 204, 304))
 async def test_rm_transfer_encoding_rfc_9112_6_3_http_11(status: int) -> None:
     """Remove transfer encoding for RFC 9112 sec 6.3 with HTTP/1.1."""
@@ -773,11 +792,8 @@ async def test___repr___after_eof() -> None:
     resp = StreamResponse()
     await resp.prepare(make_request("GET", "/"))
 
-    assert resp.prepared
-
     await resp.write(b"data")
     await resp.write_eof()
-    assert not resp.prepared
 
     resp_repr = repr(resp)
     assert resp_repr == "<StreamResponse OK eof>"
@@ -916,6 +932,14 @@ def test_set_status_with_reason() -> None:
     assert "Everything is fine!" == resp.reason
 
 
+def test_set_status_with_empty_reason() -> None:
+    resp = StreamResponse()
+
+    resp.set_status(200, "")
+    assert resp.status == 200
+    assert resp.reason == ""
+
+
 async def test_start_force_close() -> None:
     req = make_request("GET", "/")
     resp = StreamResponse()
@@ -928,14 +952,14 @@ async def test_start_force_close() -> None:
 
 async def test___repr__() -> None:
     req = make_request("GET", "/path/to")
-    resp = StreamResponse(reason=301)
+    resp = StreamResponse(reason="foo")
     await resp.prepare(req)
-    assert "<StreamResponse 301 GET /path/to >" == repr(resp)
+    assert "<StreamResponse foo GET /path/to >" == repr(resp)
 
 
 def test___repr___not_prepared() -> None:
-    resp = StreamResponse(reason=301)
-    assert "<StreamResponse 301 not prepared>" == repr(resp)
+    resp = StreamResponse(reason="foo")
+    assert "<StreamResponse foo not prepared>" == repr(resp)
 
 
 async def test_keep_alive_http10_default() -> None:
@@ -1122,6 +1146,48 @@ def test_assign_nonstr_text() -> None:
     assert 4 == resp.content_length
 
 
+mpwriter = MultipartWriter(boundary="x")
+mpwriter.append_payload(StringPayload("test"))
+
+
+async def async_iter() -> AsyncIterator[str]:
+    yield "foo"  # pragma: no cover
+
+
+class CustomIO(io.IOBase):
+    def __init__(self):
+        self._lines = [b"", b"", b"test"]
+
+    def read(self, size: int = -1) -> bytes:
+        return self._lines.pop()
+
+
+@pytest.mark.parametrize(
+    "payload,expected",
+    (
+        ("test", "test"),
+        (CustomIO(), "test"),
+        (io.StringIO("test"), "test"),
+        (io.TextIOWrapper(io.BytesIO(b"test")), "test"),
+        (io.BytesIO(b"test"), "test"),
+        (io.BufferedReader(io.BytesIO(b"test")), "test"),
+        (async_iter(), None),
+        (BodyPartReader("x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None),
+        (
+            mpwriter,
+            "--x\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest",
+        ),
+    ),
+)
+def test_payload_body_get_text(payload, expected: Optional[str]) -> None:
+    resp = Response(body=payload)
+    if expected is None:
+        with pytest.raises(TypeError):
+            resp.text
+    else:
+        assert resp.text == expected
+
+
 def test_response_set_content_length() -> None:
     resp = Response()
     with pytest.raises(RuntimeError):
@@ -1139,7 +1205,6 @@ async def test_send_headers_for_empty_body(buf, writer) -> None:
         Matches(
             "HTTP/1.1 200 OK\r\n"
             "Content-Length: 0\r\n"
-            "Content-Type: application/octet-stream\r\n"
             "Date: .+\r\n"
             "Server: .+\r\n\r\n"
         )
@@ -1168,6 +1233,11 @@ async def test_render_with_body(buf, writer) -> None:
     )
 
 
+async def test_multiline_reason(buf, writer) -> None:
+    with pytest.raises(ValueError, match=r"Reason cannot contain \\n"):
+        Response(reason="Bad\r\nInjected-header: foo")
+
+
 async def test_send_set_cookie_header(buf, writer) -> None:
     resp = Response()
     resp.cookies["name"] = "value"
@@ -1182,7 +1252,6 @@ async def test_send_set_cookie_header(buf, writer) -> None:
             "HTTP/1.1 200 OK\r\n"
             "Content-Length: 0\r\n"
             "Set-Cookie: name=value\r\n"
-            "Content-Type: application/octet-stream\r\n"
             "Date: .+\r\n"
             "Server: .+\r\n\r\n"
        )
@@ -1245,17 +1314,25 @@ def test_content_type_with_set_body() -> None:
     assert resp.content_type == 
"application/octet-stream" -def test_started_when_not_started() -> None: +def test_prepared_when_not_started() -> None: resp = StreamResponse() assert not resp.prepared -async def test_started_when_started() -> None: +async def test_prepared_when_started() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) assert resp.prepared +async def test_prepared_after_eof() -> None: + resp = StreamResponse() + await resp.prepare(make_request("GET", "/")) + await resp.write(b"data") + await resp.write_eof() + assert resp.prepared + + async def test_drain_before_start() -> None: resp = StreamResponse() with pytest.raises(AssertionError): diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index c4843d2..b71c34f 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -16,7 +16,7 @@ def app(): @pytest.fixture -def make_runner(loop, app): +def make_runner(loop: Any, app: Any): asyncio.set_event_loop(loop) runners = [] @@ -30,7 +30,7 @@ def go(**kwargs): loop.run_until_complete(runner.cleanup()) -async def test_site_for_nonfrozen_app(make_runner) -> None: +async def test_site_for_nonfrozen_app(make_runner: Any) -> None: runner = make_runner() with pytest.raises(RuntimeError): web.TCPSite(runner) @@ -40,7 +40,7 @@ async def test_site_for_nonfrozen_app(make_runner) -> None: @pytest.mark.skipif( platform.system() == "Windows", reason="the test is not valid for Windows" ) -async def test_runner_setup_handle_signals(make_runner) -> None: +async def test_runner_setup_handle_signals(make_runner: Any) -> None: runner = make_runner(handle_signals=True) await runner.setup() assert signal.getsignal(signal.SIGTERM) is not signal.SIG_DFL @@ -51,7 +51,7 @@ async def test_runner_setup_handle_signals(make_runner) -> None: @pytest.mark.skipif( platform.system() == "Windows", reason="the test is not valid for Windows" ) -async def test_runner_setup_without_signal_handling(make_runner) -> None: +async def test_runner_setup_without_signal_handling(make_runner: Any) -> None: runner = make_runner(handle_signals=False) await runner.setup() assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL @@ -59,7 +59,7 @@ async def test_runner_setup_without_signal_handling(make_runner) -> None: assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL -async def test_site_double_added(make_runner) -> None: +async def test_site_double_added(make_runner: Any) -> None: _sock = get_unused_port_socket("127.0.0.1") runner = make_runner() await runner.setup() @@ -71,7 +71,7 @@ async def test_site_double_added(make_runner) -> None: assert len(runner.sites) == 1 -async def test_site_stop_not_started(make_runner) -> None: +async def test_site_stop_not_started(make_runner: Any) -> None: runner = make_runner() await runner.setup() site = web.TCPSite(runner) @@ -81,13 +81,13 @@ async def test_site_stop_not_started(make_runner) -> None: assert len(runner.sites) == 0 -async def test_custom_log_format(make_runner) -> None: +async def test_custom_log_format(make_runner: Any) -> None: runner = make_runner(access_log_format="abc") await runner.setup() assert runner.server._kwargs["access_log_format"] == "abc" -async def test_unreg_site(make_runner) -> None: +async def test_unreg_site(make_runner: Any) -> None: runner = make_runner() await runner.setup() site = web.TCPSite(runner) @@ -95,7 +95,7 @@ async def test_unreg_site(make_runner) -> None: runner._unreg_site(site) -async def test_app_property(make_runner, app) -> None: +async def test_app_property(make_runner: Any, app: Any) -> None: runner = 
make_runner() assert runner.app is app @@ -121,7 +121,9 @@ async def test_addresses(make_runner, unix_sockname) -> None: @pytest.mark.skipif( platform.system() != "Windows", reason="Proactor Event loop present only in Windows" ) -async def test_named_pipe_runner_wrong_loop(app, selector_loop, pipe_name) -> None: +async def test_named_pipe_runner_wrong_loop( + app: Any, selector_loop: Any, pipe_name: Any +) -> None: runner = web.AppRunner(app) await runner.setup() with pytest.raises(RuntimeError): @@ -131,7 +133,9 @@ async def test_named_pipe_runner_wrong_loop(app, selector_loop, pipe_name) -> No @pytest.mark.skipif( platform.system() != "Windows", reason="Proactor Event loop present only in Windows" ) -async def test_named_pipe_runner_proactor_loop(proactor_loop, app, pipe_name) -> None: +async def test_named_pipe_runner_proactor_loop( + proactor_loop: Any, app: Any, pipe_name: Any +) -> None: runner = web.AppRunner(app) await runner.setup() pipe = web.NamedPipeSite(runner, pipe_name) @@ -139,7 +143,7 @@ async def test_named_pipe_runner_proactor_loop(proactor_loop, app, pipe_name) -> await runner.cleanup() -async def test_tcpsite_default_host(make_runner): +async def test_tcpsite_default_host(make_runner: Any) -> None: runner = make_runner() await runner.setup() site = web.TCPSite(runner) @@ -161,6 +165,13 @@ async def mock_create_server(*args, **kwargs): assert port == 8080 +async def test_tcpsite_empty_str_host(make_runner: Any) -> None: + runner = make_runner() + await runner.setup() + site = web.TCPSite(runner, host="") + assert site.name == "http://0.0.0.0:8080" + + def test_run_after_asyncio_run() -> None: async def nothing(): pass diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index d472c40..58a46ec 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -1,10 +1,13 @@ from pathlib import Path +from stat import S_IFREG, S_IRUSR, S_IWUSR from unittest import mock from aiohttp import hdrs from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web_fileresponse import FileResponse +MOCK_MODE = S_IFREG | S_IRUSR | S_IWUSR + def test_using_gzip_if_header_present_and_file_available(loop) -> None: request = make_mocked_request( @@ -15,12 +18,13 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.return_value.st_size = 1024 - gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_size = 1024 + gz_filepath.lstat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath file_sender = FileResponse(filepath) file_sender._path = filepath @@ -36,14 +40,16 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.return_value.st_size = 1024 - gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_size = 1024 + gz_filepath.lstat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + 
filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath) file_sender._path = filepath @@ -63,9 +69,10 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath) file_sender._path = filepath @@ -83,13 +90,14 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.side_effect = OSError(2, "No such file or directory") + gz_filepath.lstat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath) file_sender._path = filepath @@ -108,6 +116,7 @@ def test_status_controlled_by_user(loop) -> None: filepath.name = "logo.png" filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath, status=203) file_sender._path = filepath diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 57ac084..256cf4d 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,15 +1,22 @@ import asyncio +import bz2 import gzip import pathlib import socket import zlib from typing import Any, Iterable, Optional +from unittest import mock import pytest import aiohttp from aiohttp import web +try: + import brotlicffi as brotli +except ImportError: + import brotli + try: import ssl except ImportError: @@ -27,22 +34,20 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: indirect parameter can be passed with an encoding to get a compressed path. """ txt = tmp_path_factory.mktemp("hello-") / "hello.txt" - hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} - hello[None].write_bytes(HELLO_AIOHTTP) + hello = { + None: txt, + "gzip": txt.with_suffix(f"{txt.suffix}.gz"), + "br": txt.with_suffix(f"{txt.suffix}.br"), + "bzip2": txt.with_suffix(f"{txt.suffix}.bz2"), + } + # Uncompressed file is not actually written to test it is not required. 
hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + hello["br"].write_bytes(brotli.compress(HELLO_AIOHTTP)) + hello["bzip2"].write_bytes(bz2.compress(HELLO_AIOHTTP)) encoding = getattr(request, "param", None) return hello[encoding] -@pytest.fixture -def loop_without_sendfile(loop): - def sendfile(*args, **kwargs): - raise NotImplementedError - - loop.sendfile = sendfile - return loop - - @pytest.fixture def loop_with_mocked_native_sendfile(loop: Any): def sendfile(transport, fobj, offset, count): @@ -55,14 +60,27 @@ def sendfile(transport, fobj, offset, count): @pytest.fixture(params=["sendfile", "no_sendfile"], ids=["sendfile", "no_sendfile"]) -def sender(request, loop_without_sendfile): +def sender(request: Any, loop: Any): + sendfile_mock = None + def maker(*args, **kwargs): ret = web.FileResponse(*args, **kwargs) - if request.param == "no_sendfile": - asyncio.set_event_loop(loop_without_sendfile) + rloop = asyncio.get_running_loop() + is_patched = rloop.sendfile is sendfile_mock + assert is_patched if request.param == "no_sendfile" else not is_patched return ret - return maker + if request.param == "no_sendfile": + with mock.patch.object( + loop, + "sendfile", + autospec=True, + spec_set=True, + side_effect=NotImplementedError, + ) as sendfile_mock: + yield maker + else: + yield maker @pytest.fixture @@ -220,7 +238,7 @@ async def handler(request): await client.close() -@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +@pytest.mark.parametrize("hello_txt", ["gzip", "br"], indirect=True) async def test_static_file_custom_content_type( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any ) -> None: @@ -245,8 +263,16 @@ async def handler(request): await client.close() +@pytest.mark.parametrize( + ("accept_encoding", "expect_encoding"), + [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], +) async def test_static_file_custom_content_type_compress( - hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + accept_encoding: str, + expect_encoding: str, ): """Test that custom type with encoding is returned for unencoded requests.""" @@ -259,9 +285,9 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - resp = await client.get("/") + resp = await client.get("/", headers={"Accept-Encoding": accept_encoding}) assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers.get("Content-Encoding") == expect_encoding assert resp.headers["Content-Type"] == "application/pdf" assert await resp.read() == HELLO_AIOHTTP resp.close() @@ -269,11 +295,17 @@ async def handler(request): await client.close() +@pytest.mark.parametrize( + ("accept_encoding", "expect_encoding"), + [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], +) @pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) async def test_static_file_with_encoding_and_enable_compression( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, + accept_encoding: str, + expect_encoding: str, forced_compression: Optional[web.ContentCoding], ): """Test that enable_compression does not double compress when an encoded file is also present.""" @@ -287,9 +319,9 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - resp = await client.get("/") + resp = await client.get("/", headers={"Accept-Encoding": accept_encoding}) assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == 
"gzip" + assert resp.headers.get("Content-Encoding") == expect_encoding assert resp.headers["Content-Type"] == "text/plain" assert await resp.read() == HELLO_AIOHTTP resp.close() @@ -298,10 +330,16 @@ async def handler(request): @pytest.mark.parametrize( - ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] + ("hello_txt", "expect_type"), + [ + ("gzip", "application/gzip"), + ("br", "application/x-brotli"), + ("bzip2", "application/x-bzip2"), + ], + indirect=["hello_txt"], ) async def test_static_file_with_content_encoding( - hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, expect_type: str ) -> None: """Test requesting static compressed files returns the correct content type and encoding.""" @@ -314,9 +352,9 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == expect_encoding - assert resp.headers["Content-Type"] == "text/plain" - assert await resp.read() == HELLO_AIOHTTP + assert resp.headers.get("Content-Encoding") is None + assert resp.headers["Content-Type"] == expect_type + assert await resp.read() == hello_txt.read_bytes() resp.close() await resp.release() @@ -475,10 +513,9 @@ async def test_static_file_if_none_match( resp = await client.get("/") assert 200 == resp.status - original_etag = resp.headers.get("ETag") + original_etag = resp.headers["ETag"] assert resp.headers.get("Last-Modified") is not None - assert original_etag is not None resp.close() await resp.release() @@ -517,6 +554,39 @@ async def test_static_file_if_none_match_star( await client.close() +@pytest.mark.parametrize("if_modified_since", ("", "Fri, 31 Dec 9999 23:59:59 GMT")) +async def test_static_file_if_none_match_weak( + aiohttp_client: Any, + app_with_static_route: web.Application, + if_modified_since: str, +) -> None: + client = await aiohttp_client(app_with_static_route) + + resp = await client.get("/") + assert 200 == resp.status + original_etag = resp.headers["ETag"] + + assert resp.headers.get("Last-Modified") is not None + resp.close() + resp.release() + + weak_etag = f"W/{original_etag}" + + resp = await client.get( + "/", + headers={"If-None-Match": weak_etag, "If-Modified-Since": if_modified_since}, + ) + body = await resp.read() + assert 304 == resp.status + assert resp.headers.get("Content-Length") is None + assert resp.headers.get("ETag") == original_etag + assert b"" == body + resp.close() + resp.release() + + await client.close() + + @pytest.mark.skipif(not ssl, reason="ssl not supported") async def test_static_file_ssl( aiohttp_server, @@ -571,15 +641,6 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: await client.close() -def test_static_route_path_existence_check() -> None: - directory = pathlib.Path(__file__).parent - web.StaticResource("/", directory) - - nodirectory = directory / "nonexistent-uPNiOEAg5d" - with pytest.raises(ValueError): - web.StaticResource("/", nodirectory) - - async def test_static_file_huge(aiohttp_client, tmp_path) -> None: file_path = tmp_path / "huge_data.unknown_mime_type" diff --git a/tests/test_web_server.py b/tests/test_web_server.py index d0fd95a..14d78e2 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -4,7 +4,7 @@ import pytest -from aiohttp import client, helpers, web +from aiohttp import client, web async def test_simple_server(aiohttp_raw_server, aiohttp_client) -> None: @@ -19,12 +19,6 @@ async def 
handler(request): assert txt == "/path/to" -@pytest.mark.xfail( - not helpers.NO_EXTENSIONS, - raises=client.ServerDisconnectedError, - reason="The behavior of C-extensions differs from pure-Python: " - "https://github.com/aio-libs/aiohttp/issues/6446", -) async def test_unsupported_upgrade(aiohttp_raw_server, aiohttp_client) -> None: # don't fail if a client probes for an unsupported protocol upgrade # https://github.com/aio-libs/aiohttp/issues/6446#issuecomment-999032039 diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 0441890..eca365d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,17 +1,18 @@ import asyncio import functools +import os import pathlib +import socket import sys -from typing import Optional -from unittest import mock -from unittest.mock import MagicMock +from stat import S_IFIFO, S_IMODE +from typing import Any, Generator, NoReturn, Optional import pytest import yarl from aiohttp import abc, web from aiohttp.pytest_plugin import AiohttpClient -from aiohttp.web_urldispatcher import SystemRoute +from aiohttp.web_urldispatcher import Resource, SystemRoute @pytest.mark.parametrize( @@ -330,7 +331,6 @@ async def test_access_to_the_file_with_spaces( r = await client.get(url) assert r.status == 200 assert (await r.text()) == data - await r.release() async def test_access_non_existing_resource( @@ -380,7 +380,7 @@ async def test_handler_metadata_persistence() -> None: async def async_handler(request: web.Request) -> web.Response: """Doc""" - return web.Response() + return web.Response() # pragma: no cover def sync_handler(request): """Doc""" @@ -395,31 +395,111 @@ def sync_handler(request): assert route.handler.__doc__ == "Doc" -async def test_unauthorized_folder_access( - tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +@pytest.mark.skipif( + sys.platform.startswith("win32"), reason="Cannot remove read access on Windows" +) +@pytest.mark.parametrize("file_request", ["", "my_file.txt"]) +async def test_static_directory_without_read_permission( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, file_request: str +) -> None: + """Test static directory without read permission receives forbidden response.""" + my_dir = tmp_path / "my_dir" + my_dir.mkdir() + my_dir.chmod(0o000) + + app = web.Application() + app.router.add_static("/", str(tmp_path), show_index=True) + client = await aiohttp_client(app) + + r = await client.get(f"/{my_dir.name}/{file_request}") + assert r.status == 403 + + +@pytest.mark.parametrize("file_request", ["", "my_file.txt"]) +async def test_static_directory_with_mock_permission_error( + monkeypatch: pytest.MonkeyPatch, + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, + file_request: str, ) -> None: - # Tests the unauthorized access to a folder of static file server. - # Try to list a folder content of static file server when server does not - # have permissions to do so for the folder. 
+ """Test static directory with mock permission errors receives forbidden response.""" my_dir = tmp_path / "my_dir" my_dir.mkdir() + real_iterdir = pathlib.Path.iterdir + real_is_dir = pathlib.Path.is_dir + + def mock_iterdir(self: pathlib.Path) -> Generator[pathlib.Path, None, None]: + if my_dir.samefile(self): + raise PermissionError() + return real_iterdir(self) + + def mock_is_dir(self: pathlib.Path, **kwargs: Any) -> bool: + if my_dir.samefile(self.parent): + raise PermissionError() + return real_is_dir(self, **kwargs) + + monkeypatch.setattr("pathlib.Path.iterdir", mock_iterdir) + monkeypatch.setattr("pathlib.Path.is_dir", mock_is_dir) + app = web.Application() + app.router.add_static("/", str(tmp_path), show_index=True) + client = await aiohttp_client(app) + + r = await client.get("/") + assert r.status == 200 + r = await client.get(f"/{my_dir.name}/{file_request}") + assert r.status == 403 + - with mock.patch("pathlib.Path.__new__") as path_constructor: - path = MagicMock() - path.joinpath.return_value = path - path.resolve.return_value = path - path.iterdir.return_value.__iter__.side_effect = PermissionError() - path_constructor.return_value = path +@pytest.mark.skipif( + sys.platform.startswith("win32"), reason="Cannot remove read access on Windows" +) +async def test_static_file_without_read_permission( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + """Test static file without read permission receives forbidden response.""" + my_file = tmp_path / "my_file.txt" + my_file.write_text("secret") + my_file.chmod(0o000) - # Register global static route: - app.router.add_static("/", str(tmp_path), show_index=True) - client = await aiohttp_client(app) + app = web.Application() + app.router.add_static("/", str(tmp_path)) + client = await aiohttp_client(app) - # Request the root of the static directory. - r = await client.get("/" + my_dir.name) - assert r.status == 403 + r = await client.get(f"/{my_file.name}") + assert r.status == 403 + + +async def test_static_file_with_mock_permission_error( + monkeypatch: pytest.MonkeyPatch, + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, +) -> None: + """Test static file with mock permission errors receives forbidden response.""" + my_file = tmp_path / "my_file.txt" + my_file.write_text("secret") + my_readable = tmp_path / "my_readable.txt" + my_readable.write_text("info") + + real_open = pathlib.Path.open + + def mock_open(self: pathlib.Path, *args: Any, **kwargs: Any) -> Any: + if my_file.samefile(self): + raise PermissionError() + return real_open(self, *args, **kwargs) + + monkeypatch.setattr("pathlib.Path.open", mock_open) + + app = web.Application() + app.router.add_static("/", str(tmp_path)) + client = await aiohttp_client(app) + + # Test the mock only applies to my_file, then test the permission error. + r = await client.get(f"/{my_readable.name}") + assert r.status == 200 + r = await client.get(f"/{my_file.name}") + assert r.status == 403 async def test_access_symlink_loop( @@ -440,33 +520,87 @@ async def test_access_symlink_loop( assert r.status == 404 -async def test_access_special_resource( +async def test_access_compressed_file_as_symlink( tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: - # Tests the access to a resource that is neither a file nor a directory. - # Checks that if a special resource is accessed (f.e. named pipe or UNIX - # domain socket) then 404 HTTP status returned. 
+ """Test that compressed file variants as symlinks are ignored.""" + private_file = tmp_path / "private.txt" + private_file.write_text("private info") + www_dir = tmp_path / "www" + www_dir.mkdir() + gz_link = www_dir / "file.txt.gz" + gz_link.symlink_to(f"../{private_file.name}") + app = web.Application() + app.router.add_static("/", www_dir) + client = await aiohttp_client(app) - with mock.patch("pathlib.Path.__new__") as path_constructor: - special = MagicMock() - special.is_dir.return_value = False - special.is_file.return_value = False + # Symlink should be ignored; response reflects missing uncompressed file. + resp = await client.get(f"/{gz_link.stem}", auto_decompress=False) + assert resp.status == 404 + resp.release() + + # Again symlin is ignored, and then uncompressed is served. + txt_file = gz_link.with_suffix("") + txt_file.write_text("public data") + resp = await client.get(f"/{txt_file.name}") + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") is None + assert resp.content_type == "text/plain" + assert await resp.text() == "public data" + resp.release() + await client.close() - path = MagicMock() - path.joinpath.side_effect = lambda p: (special if p == "special" else path) - path.resolve.return_value = path - special.resolve.return_value = special - path_constructor.return_value = path +async def test_access_special_resource( + tmp_path_factory: pytest.TempPathFactory, aiohttp_client: AiohttpClient +) -> None: + """Test access to non-regular files is forbidden using a UNIX domain socket.""" + if not getattr(socket, "AF_UNIX", None): + pytest.skip("UNIX domain sockets not supported") - # Register global static route: - app.router.add_static("/", str(tmp_path), show_index=True) - client = await aiohttp_client(app) + tmp_path = tmp_path_factory.mktemp("special") + my_special = tmp_path / "sock" + my_socket = socket.socket(socket.AF_UNIX) + my_socket.bind(str(my_special)) + assert my_special.is_socket() + + app = web.Application() + app.router.add_static("/", str(tmp_path)) + + client = await aiohttp_client(app) + r = await client.get(f"/{my_special.name}") + assert r.status == 403 + my_socket.close() + + +async def test_access_mock_special_resource( + monkeypatch: pytest.MonkeyPatch, + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, +) -> None: + """Test access to non-regular files is forbidden using a mock FIFO.""" + my_special = tmp_path / "my_special" + my_special.touch() + + real_result = my_special.stat() + real_stat = pathlib.Path.stat + + def mock_stat(self: pathlib.Path, **kwargs: Any) -> os.stat_result: + s = real_stat(self, **kwargs) + if os.path.samestat(s, real_result): + mock_mode = S_IFIFO | S_IMODE(s.st_mode) + s = os.stat_result([mock_mode] + list(s)[1:]) + return s + + monkeypatch.setattr("pathlib.Path.stat", mock_stat) + + app = web.Application() + app.router.add_static("/", str(tmp_path)) + client = await aiohttp_client(app) - # Request the root of the static directory. 
- r = await client.get("/special") - assert r.status == 403 + r = await client.get(f"/{my_special.name}") + assert r.status == 403 async def test_partially_applied_handler(aiohttp_client: AiohttpClient) -> None: @@ -569,10 +703,11 @@ async def handler(request: web.Request) -> web.Response: @pytest.mark.parametrize( "path", - [ + ( "/a", "/{a}", - ], + "/{a:.*}", + ), ) def test_reuse_last_added_resource(path: str) -> None: # Test that adding a route with the same name and path of the last added @@ -580,7 +715,7 @@ def test_reuse_last_added_resource(path: str) -> None: app = web.Application() async def handler(request: web.Request) -> web.Response: - return web.Response() + return web.Response() # pragma: no cover app.router.add_get(path, handler, name="a") app.router.add_post(path, handler, name="a") @@ -592,7 +727,7 @@ def test_resource_raw_match() -> None: app = web.Application() async def handler(request: web.Request) -> web.Response: - return web.Response() + return web.Response() # pragma: no cover route = app.router.add_get("/a", handler, name="a") assert route.resource is not None @@ -722,18 +857,15 @@ async def get_foobar(request: web.Request) -> web.Response: assert (await resp.text()) == "success!" -@pytest.mark.xfail( - raises=AssertionError, - reason="Regression in v3.7: https://github.com/aio-libs/aiohttp/issues/5621", -) @pytest.mark.parametrize( ("route_definition", "urlencoded_path", "expected_http_resp_status"), ( ("/467,802,24834/hello", "/467%2C802%2C24834/hello", 200), ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467%2C802%2C24834/hello", 200), + ("/467,802,24834/hello", "/467,802,24834/hello", 200), + ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467,802,24834/hello", 200), ("/1%2C3/hello", "/1%2C3/hello", 404), ), - ids=("urldecoded_route", "urldecoded_route_with_regex", "urlencoded_route"), ) async def test_decoded_url_match( aiohttp_client: AiohttpClient, @@ -749,6 +881,128 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get(route_definition, handler) client = await aiohttp_client(app) - r = await client.get(yarl.URL(urlencoded_path, encoded=True)) - assert r.status == expected_http_resp_status + async with client.get(yarl.URL(urlencoded_path, encoded=True)) as resp: + assert resp.status == expected_http_resp_status + + +async def test_decoded_raw_match_regex(aiohttp_client: AiohttpClient) -> None: + """Verify that raw_match only matches decoded url.""" + app = web.Application() + + async def handler(request: web.Request) -> NoReturn: + assert False + + app.router.add_get("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", handler) + client = await aiohttp_client(app) + + async with client.get( + yarl.URL("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", encoded=True) + ) as resp: + assert resp.status == 404 # should only match decoded url + + +async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: + """Test route order is preserved. + + Note that fixed/static paths are always preferred over a regex path. 
+ """ + app = web.Application() + + async def handler(request: web.Request) -> web.Response: + assert isinstance(request.match_info._route.resource, Resource) + return web.Response(text=request.match_info._route.resource.canonical) + + app.router.add_get("/first/x/{b}/", handler) + app.router.add_get(r"/first/{x:.*/b}", handler) + + app.router.add_get(r"/second/{user}/info", handler) + app.router.add_get("/second/bob/info", handler) + + app.router.add_get("/third/bob/info", handler) + app.router.add_get(r"/third/{user}/info", handler) + + app.router.add_get(r"/forth/{name:\d+}", handler) + app.router.add_get("/forth/42", handler) + + app.router.add_get("/fifth/42", handler) + app.router.add_get(r"/fifth/{name:\d+}", handler) + + client = await aiohttp_client(app) + + r = await client.get("/first/x/b/") + assert r.status == 200 + assert await r.text() == "/first/x/{b}/" + + r = await client.get("/second/frank/info") + assert r.status == 200 + assert await r.text() == "/second/{user}/info" + + # Fixed/static paths are always preferred over regex paths + r = await client.get("/second/bob/info") + assert r.status == 200 + assert await r.text() == "/second/bob/info" + + r = await client.get("/third/bob/info") + assert r.status == 200 + assert await r.text() == "/third/bob/info" + + r = await client.get("/third/frank/info") + assert r.status == 200 + assert await r.text() == "/third/{user}/info" + + r = await client.get("/forth/21") + assert r.status == 200 + assert await r.text() == "/forth/{name}" + + # Fixed/static paths are always preferred over regex paths + r = await client.get("/forth/42") + assert r.status == 200 + assert await r.text() == "/forth/42" + + r = await client.get("/fifth/21") + assert r.status == 200 + assert await r.text() == "/fifth/{name}" + + r = await client.get("/fifth/42") + assert r.status == 200 + assert await r.text() == "/fifth/42" + + +async def test_url_with_many_slashes(aiohttp_client: AiohttpClient) -> None: + app = web.Application() + + class MyView(web.View): + async def get(self) -> web.Response: + return web.Response() + + app.router.add_routes([web.view("/a", MyView)]) + + client = await aiohttp_client(app) + + r = await client.get("///a") + assert r.status == 200 await r.release() + + +async def test_route_with_regex(aiohttp_client: AiohttpClient) -> None: + """Test a route with a regex preceded by a fixed string.""" + app = web.Application() + + async def handler(request: web.Request) -> web.Response: + assert isinstance(request.match_info._route.resource, Resource) + return web.Response(text=request.match_info._route.resource.canonical) + + app.router.add_get("/core/locations{tail:.*}", handler) + client = await aiohttp_client(app) + + r = await client.get("/core/locations/tail/here") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" + + r = await client.get("/core/locations_tail_here") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" + + r = await client.get("/core/locations_tail;id=abcdef") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index b471b13..42faff8 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -3,13 +3,16 @@ import asyncio import contextlib import sys -from typing import Any, Optional +import weakref +from typing import Any, NoReturn, Optional +from unittest import mock import pytest import aiohttp from aiohttp 
import web from aiohttp.http import WSCloseCode, WSMsgType +from aiohttp.pytest_plugin import AiohttpClient async def test_websocket_can_prepare(loop, aiohttp_client) -> None: @@ -313,6 +316,47 @@ async def handler(request): assert msg.type == WSMsgType.CLOSED +async def test_concurrent_close_multiple_tasks(loop: Any, aiohttp_client: Any) -> None: + srv_ws = None + + async def handler(request): + nonlocal srv_ws + ws = srv_ws = web.WebSocketResponse(autoclose=False, protocols=("foo", "bar")) + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + await asyncio.sleep(0) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoclose=False, protocols=("eggs", "bar")) + + task1 = asyncio.create_task(srv_ws.close(code=WSCloseCode.INVALID_TEXT)) + task2 = asyncio.create_task(srv_ws.close(code=WSCloseCode.INVALID_TEXT)) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + + await task1 + await task2 + + await asyncio.sleep(0) + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: srv_ws: Optional[web.WebSocketResponse] = None @@ -681,7 +725,134 @@ async def handler(request): await ws.close() -async def test_server_ws_async_for(loop, aiohttp_server) -> None: +async def test_heartbeat_connection_closed( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that the connection is closed while ping is in progress.""" + ping_count = 0 + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_count + ws_server = web.WebSocketResponse(heartbeat=0.05) + await ws_server.prepare(request) + # We patch write here to simulate a connection reset error + # since if we closed the connection normally, the server would + # would cancel the heartbeat task and we wouldn't get a ping + with mock.patch.object( + ws_server._req.transport, "write", side_effect=ConnectionResetError + ), mock.patch.object( + ws_server._writer, "ping", wraps=ws_server._writer.ping + ) as ping: + try: + await ws_server.receive() + finally: + ping_count = ping.call_count + assert False + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert msg.extra is None + assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE + assert ping_count == 1 + await ws.close() + + +async def test_heartbeat_failure_ends_receive( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that no heartbeat response to the server ends the receive call.""" + ws_server_close_code = None + ws_server_exception = None + + async def handler(request: web.Request) -> NoReturn: + nonlocal ws_server_close_code, ws_server_exception + ws_server = web.WebSocketResponse(heartbeat=0.05) + await ws_server.prepare(request) + try: + await ws_server.receive() + finally: + ws_server_close_code = ws_server.close_code + ws_server_exception = ws_server.exception() + assert False + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + msg = await ws.receive() 
+ assert msg.type is aiohttp.WSMsgType.PING + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE + assert ws_server_close_code == WSCloseCode.ABNORMAL_CLOSURE + assert isinstance(ws_server_exception, asyncio.TimeoutError) + await ws.close() + + +async def test_heartbeat_no_pong_send_many_messages( + loop: Any, aiohttp_client: Any +) -> None: + """Test no pong after sending many messages.""" + + async def handler(request): + ws = web.WebSocketResponse(heartbeat=0.05) + await ws.prepare(request) + for _ in range(10): + await ws.send_str("test") + + await ws.receive() + return ws + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + for _ in range(10): + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert msg.data == "test" + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.PING + await ws.close() + + +async def test_heartbeat_no_pong_receive_many_messages( + loop: Any, aiohttp_client: Any +) -> None: + """Test no pong after receiving many messages.""" + + async def handler(request): + ws = web.WebSocketResponse(heartbeat=0.05) + await ws.prepare(request) + for _ in range(10): + server_msg = await ws.receive() + assert server_msg.type is aiohttp.WSMsgType.TEXT + + await ws.receive() + return ws + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + for _ in range(10): + await ws.send_str("test") + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.PING + await ws.close() + + +async def test_server_ws_async_for(loop: Any, aiohttp_server: Any) -> None: closed = loop.create_future() async def handler(request): @@ -890,3 +1061,103 @@ async def handler(request): await ws.close(code=WSCloseCode.OK, message="exit message") await closed + + +async def test_websocket_shutdown(aiohttp_client: AiohttpClient) -> None: + """Test that the client websocket gets the close message when the server is shutting down.""" + url = "/ws" + app = web.Application() + websockets = web.AppKey("websockets", weakref.WeakSet) + app[websockets] = weakref.WeakSet() + + # need for send signal shutdown server + shutdown_websockets = web.AppKey("shutdown_websockets", weakref.WeakSet) + app[shutdown_websockets] = weakref.WeakSet() + + async def websocket_handler(request: web.Request) -> web.WebSocketResponse: + websocket = web.WebSocketResponse() + await websocket.prepare(request) + request.app[websockets].add(websocket) + request.app[shutdown_websockets].add(websocket) + + try: + async for message in websocket: + await websocket.send_json({"ok": True, "message": message.json()}) + finally: + request.app[websockets].discard(websocket) + + return websocket + + async def on_shutdown(app: web.Application) -> None: + while app[shutdown_websockets]: + websocket = app[shutdown_websockets].pop() + await websocket.close( + code=aiohttp.WSCloseCode.GOING_AWAY, + message="Server shutdown", + ) + + app.router.add_get(url, websocket_handler) + app.on_shutdown.append(on_shutdown) + + client = await aiohttp_client(app) + + websocket = await client.ws_connect(url) + + message = {"message": "hi"} + await websocket.send_json(message) + reply = await websocket.receive_json() + assert reply == {"ok": True, "message": message} + + await app.shutdown() + + assert websocket.closed is False + + reply = await 
websocket.receive() + + assert reply.type is aiohttp.http.WSMsgType.CLOSE + assert reply.data == aiohttp.WSCloseCode.GOING_AWAY + assert reply.extra == "Server shutdown" + + assert websocket.closed is True + + +async def test_ws_close_return_code(aiohttp_client: AiohttpClient) -> None: + """Test that the close code is returned when the server closes the connection.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + await resp.send_str("some data") + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + assert resp.close_code == WSCloseCode.OK + + +async def test_abnormal_closure_when_server_does_not_receive( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and a message is pending.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + await resp.send_str("some data") + await asyncio.sleep(0.1) + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + assert resp.close_code == WSCloseCode.ABNORMAL_CLOSURE