From 9f05ef5fca4522ea77ba72d3bf1c8d95a6f56831 Mon Sep 17 00:00:00 2001
From: Stefan Schulz
Date: Sun, 27 Mar 2022 21:22:35 +0200
Subject: [PATCH 1/2] anyio tests

---
 pyproject.toml                             |  8 +-
 tests/benchmarks/test_execution_async.py   | 23 +++++-
 tests/conftest.py                          | 15 ++++
 tests/execution/test_abstract.py           |  4 +-
 tests/execution/test_executor.py           | 12 +--
 tests/execution/test_lists.py              | 14 ++--
 tests/execution/test_map_async_iterator.py | 89 +++++++++++++---------
 tests/execution/test_middleware.py         |  4 +-
 tests/execution/test_mutations.py          | 10 +--
 tests/execution/test_nonnull.py            | 20 ++---
 tests/execution/test_parallel.py           | 23 +++---
 tests/execution/test_subscribe.py          | 48 ++++++------
 tests/execution/test_sync.py               | 10 +--
 tests/pyutils/test_inspect.py              |  2 +-
 tests/pyutils/test_is_awaitable.py         |  8 +-
 tests/pyutils/test_simple_pub_sub.py       | 10 +--
 tests/test_star_wars_query.py              | 36 ++++-----
 tests/test_user_registry.py                | 30 +++-----
 18 files changed, 207 insertions(+), 159 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 3a71c2e3..1e835183 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -42,15 +42,12 @@ packages = [
 ]

 [tool.poetry.dependencies]
-python = "^3.6"
+python = "^3.6,>=3.6.2"
 typing-extensions = { version = "^4.0", python = "<3.8" }
+anyio = "^3.5.0"

 [tool.poetry.dev-dependencies]
 pytest = "^6.2"
-pytest-asyncio = [
-    {version=">=0.17,<1", python = ">=3.7" },
-    {version=">=0.16,<0.17", python = "<3.7" },
-]
 pytest-benchmark = "^3.4"
 pytest-cov = "^3.0"
 pytest-describe = "^2.0"
@@ -66,6 +63,7 @@ sphinx_rtd_theme = ">=1,<2"
 check-manifest = ">=0.47,<1"
 bump2version = ">=1.0,<2"
 tox = "^3.24"
+trio = { version = "^0.20.0", python = ">=3.7"}

 [tool.black]
 target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
diff --git a/tests/benchmarks/test_execution_async.py b/tests/benchmarks/test_execution_async.py
index de7de2e5..87b0cb87 100644
--- a/tests/benchmarks/test_execution_async.py
+++ b/tests/benchmarks/test_execution_async.py
@@ -1,4 +1,7 @@
 import asyncio
+
+from pytest import mark
+
 from graphql import (
     GraphQLSchema,
     GraphQLObjectType,
@@ -37,7 +40,10 @@ async def resolve_user(obj, info):
 )


-def test_execute_basic_async(benchmark):
+@mark.parametrize("anyio_backend", ["asyncio"])
+def test_execute_basic_async(anyio_backend, benchmark):
+    # Note: test too low level for anyio, duplicated test for trio below
+
     # Note: we are creating the async loop outside of the benchmark code so that
     # the setup is not included in the benchmark timings
     loop = asyncio.events.new_event_loop()
@@ -54,3 +60,18 @@ def test_execute_basic_async(benchmark):
             "name": "Sarah",
         },
     }
+
+
+@mark.parametrize("anyio_backend", ["trio"])
+def test_execute_basic_async_trio(anyio_backend, benchmark):
+    # TODO: can the trio loop be started beforehand? run benchmark in async function somehow?
+ import trio + + result = benchmark(lambda: trio.run(graphql, schema, "query { user { id, name }}")) + assert not result.errors + assert result.data == { + "user": { + "id": "1", + "name": "Sarah", + }, + } diff --git a/tests/conftest.py b/tests/conftest.py index afb04855..a9cae61f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,22 @@ # pytest configuration +import sys + import pytest +if sys.version_info >= (3, 7): + event_loops = [ + pytest.param(("asyncio"), id="asyncio"), + pytest.param(("trio"), id="trio"), + ] +else: + event_loops = [pytest.param(("asyncio"), id="asyncio")] + + +@pytest.fixture(params=event_loops) +def anyio_backend(request): + return request.param + def pytest_addoption(parser): parser.addoption( diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index efc130f1..4bfc0dfd 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -20,14 +20,14 @@ def sync_and_async(spec): """Decorator for running a test synchronously and asynchronously.""" - return mark.asyncio( + return mark.anyio( mark.parametrize("sync", (True, False), ids=("sync", "async"))(spec) ) def access_variants(spec): """Decorator for tests with dict and object access, including inheritance.""" - return mark.asyncio( + return mark.anyio( mark.parametrize("access", ("dict", "object", "inheritance"))(spec) ) diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 7cd2260a..58b2ce39 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -1,4 +1,4 @@ -import asyncio +import anyio from typing import cast, Any, Awaitable, Optional from pytest import mark, raises @@ -91,7 +91,7 @@ def accepts_positional_arguments(): assert result == ({"a": "rootValue"}, None) - @mark.asyncio + @mark.anyio async def executes_arbitrary_code(): # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic class Data: @@ -137,7 +137,7 @@ def deeper(self, _info): return [Data(), None, Data()] async def promise_data(): - await asyncio.sleep(0) + await anyio.sleep(0) return Data() DeepDataType: GraphQLObjectType @@ -422,7 +422,7 @@ def resolve(_obj, _info, **args): assert len(resolved_args) == 1 assert resolved_args[0] == {"numArg": 123, "stringArg": "foo"} - @mark.asyncio + @mark.anyio async def nulls_out_error_subtrees(): document = parse( """ @@ -832,7 +832,7 @@ def resolves_to_an_error_if_schema_does_not_support_operation(): ], ) - @mark.asyncio + @mark.anyio async def correct_field_ordering_despite_execution_order(): schema = GraphQLSchema( GraphQLObjectType( @@ -948,7 +948,7 @@ def does_not_include_arguments_that_were_not_set(): None, ) - @mark.asyncio + @mark.anyio async def fails_when_is_type_of_check_is_not_met(): class Special: value: str diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 729c1191..7472e7e9 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -117,7 +117,7 @@ def execute_query(list_value: Any) -> Any: return result - @mark.asyncio + @mark.anyio async def contains_values(): list_field = [1, 2] assert await _complete(list_field, "[Int]") == ({"listField": [1, 2]}, None) @@ -125,7 +125,7 @@ async def contains_values(): assert await _complete(list_field, "[Int!]") == ({"listField": [1, 2]}, None) assert await _complete(list_field, "[Int!]!") == ({"listField": [1, 2]}, None) - @mark.asyncio + @mark.anyio async def contains_null(): list_field = [1, None, 2] errors = [ @@ -146,7 +146,7 @@ async def contains_null(): assert 
await _complete(list_field, "[Int!]") == ({"listField": None}, errors) assert await _complete(list_field, "[Int!]!") == (None, errors) - @mark.asyncio + @mark.anyio async def returns_null(): list_field = None errors = [ @@ -161,7 +161,7 @@ async def returns_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, None) assert await _complete(list_field, "[Int!]!") == (None, errors) - @mark.asyncio + @mark.anyio async def contains_error(): list_field = [1, RuntimeError("bad"), 2] errors = [ @@ -188,7 +188,7 @@ async def contains_error(): errors, ) - @mark.asyncio + @mark.anyio async def results_in_errors(): list_field = RuntimeError("bad") errors = [ @@ -227,7 +227,7 @@ async def _complete(list_field): result = cast(Awaitable, result) return await result - @mark.asyncio + @mark.anyio async def accepts_an_async_generator_as_a_list_value(): async def list_field(): yield "one" @@ -239,7 +239,7 @@ async def list_field(): None, ) - @mark.asyncio + @mark.anyio async def accepts_a_custom_async_iterable_as_a_list_value(): class ListField: def __aiter__(self): diff --git a/tests/execution/test_map_async_iterator.py b/tests/execution/test_map_async_iterator.py index 299d010a..43f3d8dc 100644 --- a/tests/execution/test_map_async_iterator.py +++ b/tests/execution/test_map_async_iterator.py @@ -1,5 +1,5 @@ import sys -from asyncio import CancelledError, Event, ensure_future, sleep +from anyio import create_task_group, get_cancelled_exc_class, sleep, Event from pytest import mark, raises @@ -16,7 +16,7 @@ async def anext(iterator): def describe_map_async_iterator(): - @mark.asyncio + @mark.anyio async def maps_over_async_generator(): async def source(): yield 1 @@ -31,7 +31,7 @@ async def source(): with raises(StopAsyncIteration): assert await anext(doubles) - @mark.asyncio + @mark.anyio async def maps_over_async_iterable(): items = [1, 2, 3] @@ -52,7 +52,7 @@ async def __anext__(self): assert not items assert values == [2, 4, 6] - @mark.asyncio + @mark.anyio async def compatible_with_async_for(): async def source(): yield 1 @@ -65,7 +65,7 @@ async def source(): assert values == [2, 4, 6] - @mark.asyncio + @mark.anyio async def maps_over_async_values_with_async_function(): async def source(): yield 1 @@ -81,7 +81,7 @@ async def double(x): assert values == [2, 4, 6] - @mark.asyncio + @mark.anyio async def allows_returning_early_from_mapped_async_generator(): async def source(): yield 1 @@ -102,7 +102,7 @@ async def source(): with raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @mark.anyio async def allows_returning_early_from_mapped_async_iterable(): items = [1, 2, 3] @@ -130,7 +130,7 @@ async def __anext__(self): with raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @mark.anyio async def passes_through_early_return_from_async_values(): async def source(): try: @@ -154,7 +154,7 @@ async def source(): with raises(GeneratorExit): assert await anext(doubles) - @mark.asyncio + @mark.anyio async def allows_throwing_errors_through_async_iterable(): items = [1, 2, 3] @@ -184,7 +184,7 @@ async def __anext__(self): with raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @mark.anyio async def allows_throwing_errors_with_values_through_async_iterators(): class Iterator: def __aiter__(self): @@ -210,7 +210,7 @@ async def __anext__(self): with raises(StopAsyncIteration): await anext(one) - @mark.asyncio + @mark.anyio async def allows_throwing_errors_with_traceback_through_async_iterators(): class Iterator: def __aiter__(self): @@ -236,7 +236,7 
@@ async def __anext__(self): with raises(StopAsyncIteration): await anext(one) - @mark.asyncio + @mark.anyio async def passes_through_caught_errors_through_async_generators(): async def source(): try: @@ -259,7 +259,7 @@ async def source(): with raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @mark.anyio async def does_not_normally_map_over_thrown_errors(): async def source(): yield "Hello" @@ -274,7 +274,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" - @mark.asyncio + @mark.anyio async def does_not_normally_map_over_externally_thrown_errors(): async def source(): yield "Hello" @@ -288,7 +288,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" - @mark.asyncio + @mark.anyio async def can_use_simple_iterator_instead_of_generator(): async def source(): yield 1 @@ -367,7 +367,7 @@ def double(x): await sleep(0) - @mark.asyncio + @mark.anyio async def stops_async_iteration_on_close(): async def source(): yield 1 @@ -381,21 +381,35 @@ async def source(): result = await anext(doubles) assert result == 2 - # Make sure it is blocked - doubles_future = ensure_future(anext(doubles)) - await sleep(0.05) - assert not doubles_future.done() + done = False - # Unblock and watch StopAsyncIteration propagate - await doubles.aclose() - await sleep(0.05) - assert doubles_future.done() - assert isinstance(doubles_future.exception(), StopAsyncIteration) + async def set_done(): + nonlocal done + try: + await anext(doubles) + except StopAsyncIteration: + done = True + raise + + with raises(StopAsyncIteration): + async with create_task_group() as tg: + # Make sure it is blocked + tg.start_soon(set_done) + await sleep(0.05) + assert not done + + # Unblock and watch StopAsyncIteration propagate + try: + await doubles.aclose() + except: + assert False # ensure that aclose does not raise a StopAsyncIteration + + assert done with raises(StopAsyncIteration): await anext(singles) - @mark.asyncio + @mark.anyio async def can_unset_closed_state_of_async_iterator(): items = [1, 2, 3] @@ -445,7 +459,7 @@ async def aclose(self): assert not doubles.is_closed assert not iterator.is_closed - @mark.asyncio + @mark.anyio async def can_cancel_async_iterator_while_waiting(): class Iterator: def __init__(self): @@ -459,7 +473,7 @@ async def __anext__(self): try: await sleep(0.5) return self.value # pragma: no cover - except CancelledError: + except get_cancelled_exc_class(): self.value = -1 raise @@ -475,17 +489,18 @@ async def iterator_task(): try: async for _ in doubles: assert False # pragma: no cover - except CancelledError: + except get_cancelled_exc_class(): cancelled = True - task = ensure_future(iterator_task()) - await sleep(0.05) - assert not cancelled - assert not doubles.is_closed - assert iterator.value == 1 - assert not iterator.is_closed - task.cancel() - await sleep(0.05) + async with create_task_group() as tg: + tg.start_soon(iterator_task) + await sleep(0.05) + assert not cancelled + assert not doubles.is_closed + assert iterator.value == 1 + assert not iterator.is_closed + tg.cancel_scope.cancel() + assert cancelled assert iterator.value == -1 assert doubles.is_closed diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 6db8bdab..e8a7a4ed 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -90,7 +90,7 @@ def capitalize_middleware(next_, *args, **kwargs): assert result.data == {"first": "Eno", "second": "Owt"} # type: ignore - @mark.asyncio + @mark.anyio async def single_async_function(): 
doc = parse("{ first second }") @@ -202,7 +202,7 @@ def resolve(self, next_, *args, **kwargs): ) assert result.data == {"field": "devloseR"} # type: ignore - @mark.asyncio + @mark.anyio async def with_async_function_and_object(): doc = parse("{ field }") diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 819eddd4..b9d25e58 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -1,4 +1,4 @@ -import asyncio +import anyio from typing import Awaitable from pytest import mark @@ -36,14 +36,14 @@ def immediately_change_the_number(self, newNumber: int) -> NumberHolder: return self.numberHolder async def promise_to_change_the_number(self, new_number: int) -> NumberHolder: - await asyncio.sleep(0) + await anyio.sleep(0) return self.immediately_change_the_number(new_number) def fail_to_change_the_number(self, newNumber: int): raise RuntimeError(f"Cannot change the number to {newNumber}") async def promise_and_fail_to_change_the_number(self, newNumber: int): - await asyncio.sleep(0) + await anyio.sleep(0) self.fail_to_change_the_number(newNumber) @@ -91,7 +91,7 @@ async def promise_and_fail_to_change_the_number(self, newNumber: int): def describe_execute_handles_mutation_execution_ordering(): - @mark.asyncio + @mark.anyio async def evaluates_mutations_serially(): document = parse( """ @@ -139,7 +139,7 @@ def does_not_include_illegal_mutation_fields_in_output(): result = execute_sync(schema=schema, document=document) assert result == ({}, None) - @mark.asyncio + @mark.anyio async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): document = parse( """ diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index da15e1b6..999c2bc4 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -125,12 +125,12 @@ def describe_nulls_a_nullable_field(): } """ - @mark.asyncio + @mark.anyio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ({"sync": None}, None) - @mark.asyncio + @mark.anyio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -154,7 +154,7 @@ def describe_nulls_a_returned_object_that_contains_a_non_null_field(): } """ - @mark.asyncio + @mark.anyio async def that_returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ( @@ -169,7 +169,7 @@ async def that_returns_null(): ], ) - @mark.asyncio + @mark.anyio async def that_throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -215,14 +215,14 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): }, } - @mark.asyncio + @mark.anyio async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) ) assert result == (data, None) - @mark.asyncio + @mark.anyio async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -349,7 +349,7 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): "anotherPromiseNest": None, } - @mark.asyncio + @mark.anyio async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) @@ -412,7 +412,7 @@ async def returns_null(): ], ) - @mark.asyncio + @mark.anyio async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -478,7 +478,7 @@ def describe_nulls_the_top_level_if_non_nullable_field(): } 
""" - @mark.asyncio + @mark.anyio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ( @@ -493,7 +493,7 @@ async def returns_null(): ], ) - @mark.asyncio + @mark.anyio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index aeb2a142..fc74025b 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -1,4 +1,4 @@ -import asyncio +import anyio from typing import Awaitable from pytest import mark @@ -21,18 +21,19 @@ class Barrier: """Barrier that makes progress only after a certain number of waits.""" def __init__(self, number: int): - self.event = asyncio.Event() + self.event = anyio.Event() self.number = number async def wait(self) -> bool: self.number -= 1 if not self.number: self.event.set() - return await self.event.wait() + await self.event.wait() + return True def describe_parallel_execution(): - @mark.asyncio + @mark.anyio async def resolve_fields_in_parallel(): barrier = Barrier(2) @@ -54,11 +55,13 @@ async def resolve(*_args): # raises TimeoutError if not parallel awaitable_result = execute(schema, ast) assert isinstance(awaitable_result, Awaitable) - result = await asyncio.wait_for(awaitable_result, 1.0) + + with anyio.fail_after(1.0): + result = await awaitable_result assert result == ({"foo": True, "bar": True}, None) - @mark.asyncio + @mark.anyio async def resolve_list_in_parallel(): barrier = Barrier(2) @@ -84,11 +87,12 @@ async def resolve_list(*args): # raises TimeoutError if not parallel awaitable_result = execute(schema, ast) assert isinstance(awaitable_result, Awaitable) - result = await asyncio.wait_for(awaitable_result, 1.0) + with anyio.fail_after(1.0): + result = await awaitable_result assert result == ({"foo": [True, True]}, None) - @mark.asyncio + @mark.anyio async def resolve_is_type_of_in_parallel(): FooType = GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) @@ -147,7 +151,8 @@ async def is_type_of_baz(obj, *_args): # raises TimeoutError if not parallel awaitable_result = execute(schema, ast) assert isinstance(awaitable_result, Awaitable) - result = await asyncio.wait_for(awaitable_result, 1.0) + with anyio.fail_after(1.0): + result = await awaitable_result assert result == ( {"foo": [{"foo": "bar", "foobar": 1}, {"foo": "baz", "foobaz": 2}]}, diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 8b1fe639..1b7d678f 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,4 +1,4 @@ -import asyncio +import anyio from typing import Any, Dict, List, Callable @@ -121,7 +121,7 @@ def transform(new_email): # Check all error cases when initializing the subscription. 
def describe_subscription_initialization_phase(): - @mark.asyncio + @mark.anyio async def accepts_positional_arguments(): document = parse( """ @@ -143,7 +143,7 @@ async def empty_async_iterator(_info): await anext(ai) await ai.aclose() # type: ignore - @mark.asyncio + @mark.anyio async def accepts_multiple_subscription_fields_defined_in_schema(): schema = GraphQLSchema( query=DummyQueryType, @@ -168,7 +168,7 @@ async def foo_generator(_info): await subscription.aclose() - @mark.asyncio + @mark.anyio async def accepts_type_definition_with_sync_subscribe_function(): async def foo_generator(_obj, _info): yield {"foo": "FooValue"} @@ -188,10 +188,10 @@ async def foo_generator(_obj, _info): await subscription.aclose() - @mark.asyncio + @mark.anyio async def accepts_type_definition_with_async_subscribe_function(): async def foo_generator(_obj, _info): - await asyncio.sleep(0) + await anyio.sleep(0) yield {"foo": "FooValue"} schema = GraphQLSchema( @@ -209,7 +209,7 @@ async def foo_generator(_obj, _info): await subscription.aclose() - @mark.asyncio + @mark.anyio async def uses_a_custom_default_subscribe_field_resolver(): schema = GraphQLSchema( query=DummyQueryType, @@ -238,7 +238,7 @@ async def custom_foo(): await subscription.aclose() - @mark.asyncio + @mark.anyio async def should_only_resolve_the_first_field_of_invalid_multi_field(): did_resolve = {"foo": False, "bar": False} @@ -273,7 +273,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover await subscription.aclose() - @mark.asyncio + @mark.anyio async def throws_an_error_if_some_of_required_arguments_are_missing(): document = parse("subscription { foo }") @@ -296,7 +296,7 @@ async def throws_an_error_if_some_of_required_arguments_are_missing(): with raises(TypeError, match="missing .* positional argument: 'document'"): await subscribe(schema=schema) # type: ignore - @mark.asyncio + @mark.anyio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") @@ -314,7 +314,7 @@ async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): ], ) - @mark.asyncio + @mark.anyio async def resolves_to_an_error_for_unknown_subscription_field(): schema = GraphQLSchema( query=DummyQueryType, @@ -335,7 +335,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ], ) - @mark.asyncio + @mark.anyio async def should_pass_through_unexpected_errors_thrown_in_subscribe(): schema = GraphQLSchema( query=DummyQueryType, @@ -346,7 +346,7 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): with raises(TypeError, match="^Must provide document\\.$"): await subscribe(schema=schema, document={}) # type: ignore - @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): schema = GraphQLSchema( @@ -370,7 +370,7 @@ async def throws_an_error_if_subscribe_does_not_return_an_iterator(): "Subscription field must return AsyncIterable. Received: 'test'." 
) - @mark.asyncio + @mark.anyio async def resolves_to_an_error_for_subscription_resolver_errors(): async def subscribe_with_fn(subscribe_fn: Callable): schema = GraphQLSchema( @@ -421,7 +421,7 @@ async def reject_error(*_args): assert await subscribe_with_fn(reject_error) == expected_result - @mark.asyncio + @mark.anyio async def resolves_to_an_error_if_variables_were_wrong_type(): schema = GraphQLSchema( query=DummyQueryType, @@ -464,7 +464,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # Once a subscription returns a valid AsyncIterator, it can still yield errors. def describe_subscription_publish_phase(): - @mark.asyncio + @mark.anyio async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() @@ -499,7 +499,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): assert await payload1 == (expected_payload, None) assert await payload2 == (expected_payload, None) - @mark.asyncio + @mark.anyio async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() subscription = await create_subscription(pubsub) @@ -577,7 +577,7 @@ async def produces_a_payload_per_subscription_event(): with raises(StopAsyncIteration): assert await anext(subscription) - @mark.asyncio + @mark.anyio async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() subscription = await create_subscription(pubsub) @@ -633,7 +633,7 @@ async def produces_a_payload_when_there_are_multiple_events(): None, ) - @mark.asyncio + @mark.anyio async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() subscription = await create_subscription(pubsub) @@ -683,7 +683,7 @@ async def should_not_trigger_when_subscription_is_already_done(): with raises(StopAsyncIteration): await payload - @mark.asyncio + @mark.anyio async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() subscription = await create_subscription(pubsub) @@ -724,7 +724,7 @@ async def should_not_trigger_when_subscription_is_thrown(): with raises(StopAsyncIteration): await payload - @mark.asyncio + @mark.anyio async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() subscription = await create_subscription(pubsub) @@ -780,7 +780,7 @@ async def event_order_is_correct_for_multiple_publishes(): None, ) - @mark.asyncio + @mark.anyio async def should_handle_error_during_execution_of_source_event(): async def generate_messages(_obj, _info): yield "Hello" @@ -828,7 +828,7 @@ def resolve_message(message, _info): # Subsequent events are still executed. 
assert await anext(subscription) == ({"newMessage": "Bonjour"}, None) - @mark.asyncio + @mark.anyio async def should_pass_through_error_thrown_in_source_event_stream(): async def generate_messages(_obj, _info): yield "Hello" @@ -865,7 +865,7 @@ def resolve_message(message, _info): with raises(StopAsyncIteration): await anext(subscription) - @mark.asyncio + @mark.anyio async def should_work_with_async_resolve_function(): async def generate_messages(_obj, _info): yield "Hello" diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index d5604310..273f3e08 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -52,7 +52,7 @@ def does_not_return_an_awaitable_if_mutation_fields_are_all_synchronous(): None, ) - @mark.asyncio + @mark.anyio async def returns_an_awaitable_if_any_field_is_asynchronous(): doc = "query Example { syncField, asyncField }" result = execute(schema, parse(doc), "rootValue") @@ -81,7 +81,7 @@ def does_not_throw_if_not_encountering_async_execution_with_check_sync(): None, ) - @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_execution_with_check_sync(): doc = "query Example { syncField, asyncField }" @@ -92,7 +92,7 @@ async def throws_if_encountering_async_execution_with_check_sync(): msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." - @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" @@ -150,7 +150,7 @@ def does_not_throw_if_not_encountering_async_operation_with_check_sync(): None, ) - @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_with_check_sync(): doc = "query Example { syncField, asyncField }" @@ -159,7 +159,7 @@ async def throws_if_encountering_async_operation_with_check_sync(): msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." 
- @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index 6ace1fcd..3528ea0a 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -137,7 +137,7 @@ def test_generator(): assert inspect(test_generator) == "" assert inspect(test_generator()) == "" - @mark.asyncio + @mark.anyio async def inspect_coroutine(): async def test_coroutine(): pass diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index 400423ba..91c65943 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -66,7 +66,7 @@ async def some_coroutine(): assert not isawaitable(some_coroutine) assert not is_awaitable(some_coroutine) - @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def recognizes_a_coroutine_object(): async def some_coroutine(): @@ -84,9 +84,9 @@ def some_old_style_coroutine(): assert is_awaitable(some_old_style_coroutine()) assert is_awaitable(some_old_style_coroutine()) - @mark.asyncio + @mark.parametrize("anyio_backend", ["asyncio"]) @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") - async def recognizes_a_future_object(): + async def recognizes_a_future_object(anyio_backend): async def some_coroutine(): return False # pragma: no cover @@ -95,7 +95,7 @@ async def some_coroutine(): assert is_awaitable(some_future) assert is_awaitable(some_future) - @mark.asyncio + @mark.anyio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def declines_an_async_generator(): async def some_async_generator(): diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 47060d3f..59861d94 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -1,4 +1,4 @@ -from asyncio import sleep +from anyio import sleep from inspect import isawaitable from pytest import mark, raises @@ -7,7 +7,7 @@ def describe_simple_pub_sub(): - @mark.asyncio + @mark.anyio async def subscribe_async_iterator_mock(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() @@ -51,7 +51,7 @@ async def subscribe_async_iterator_mock(): with raises(StopAsyncIteration): await iterator.__anext__() - @mark.asyncio + @mark.anyio async def iterator_aclose_empties_push_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -69,7 +69,7 @@ async def iterator_aclose_empties_push_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @mark.asyncio + @mark.anyio async def iterator_aclose_empties_pull_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -86,7 +86,7 @@ async def iterator_aclose_empties_pull_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @mark.asyncio + @mark.anyio async def iterator_aclose_is_idempotent(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() diff --git a/tests/test_star_wars_query.py b/tests/test_star_wars_query.py index 5c6fa40e..9051f701 100644 --- a/tests/test_star_wars_query.py +++ b/tests/test_star_wars_query.py @@ -7,7 +7,7 @@ def describe_star_wars_query_tests(): def describe_basic_queries(): - @mark.asyncio + @mark.anyio async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): source = """ query HeroNameQuery { @@ -19,7 +19,7 @@ async def 
correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"name": "R2-D2"}}, None) - @mark.asyncio + @mark.anyio async def accepts_positional_arguments_to_graphql(): source = """ query HeroNameQuery { @@ -34,7 +34,7 @@ async def accepts_positional_arguments_to_graphql(): sync_result = graphql_sync(schema, source) assert sync_result == result - @mark.asyncio + @mark.anyio async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): source = """ query HeroNameAndFriendsQuery { @@ -64,7 +64,7 @@ async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): ) def describe_nested_queries(): - @mark.asyncio + @mark.anyio async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): source = """ query NestedQuery { @@ -122,7 +122,7 @@ async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): ) def describe_using_ids_and_query_parameters_to_refetch_objects(): - @mark.asyncio + @mark.anyio async def allows_us_to_query_for_r2_d2_directly_using_his_id(): source = """ query { @@ -134,7 +134,7 @@ async def allows_us_to_query_for_r2_d2_directly_using_his_id(): result = await graphql(schema=schema, source=source) assert result == ({"droid": {"name": "R2-D2"}}, None) - @mark.asyncio + @mark.anyio async def allows_us_to_query_characters_directly_using_their_id(): source = """ query FetchLukeAndC3POQuery { @@ -152,7 +152,7 @@ async def allows_us_to_query_characters_directly_using_their_id(): None, ) - @mark.asyncio + @mark.anyio async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -167,7 +167,7 @@ async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): ) assert result == ({"human": {"name": "Luke Skywalker"}}, None) - @mark.asyncio + @mark.anyio async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -182,7 +182,7 @@ async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): ) assert result == ({"human": {"name": "Han Solo"}}, None) - @mark.asyncio + @mark.anyio async def generic_query_that_gets_null_back_when_passed_invalid_id(): source = """ query humanQuery($id: String!) 
{ @@ -198,7 +198,7 @@ async def generic_query_that_gets_null_back_when_passed_invalid_id(): assert result == ({"human": None}, None) def describe_using_aliases_to_change_the_key_in_the_response(): - @mark.asyncio + @mark.anyio async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): source = """ query FetchLukeAliased { @@ -210,7 +210,7 @@ async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): result = await graphql(schema=schema, source=source) assert result == ({"luke": {"name": "Luke Skywalker"}}, None) - @mark.asyncio + @mark.anyio async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): source = """ query FetchLukeAndLeiaAliased { @@ -229,7 +229,7 @@ async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): ) def describe_uses_fragments_to_express_more_complex_queries(): - @mark.asyncio + @mark.anyio async def allows_us_to_query_using_duplicated_content(): source = """ query DuplicateFields { @@ -252,7 +252,7 @@ async def allows_us_to_query_using_duplicated_content(): None, ) - @mark.asyncio + @mark.anyio async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): source = """ query UseFragment { @@ -278,7 +278,7 @@ async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): ) def describe_using_typename_to_find_the_type_of_an_object(): - @mark.asyncio + @mark.anyio async def allows_us_to_verify_that_r2_d2_is_a_droid(): source = """ query CheckTypeOfR2 { @@ -291,7 +291,7 @@ async def allows_us_to_verify_that_r2_d2_is_a_droid(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"__typename": "Droid", "name": "R2-D2"}}, None) - @mark.asyncio + @mark.anyio async def allows_us_to_verify_that_luke_is_a_human(): source = """ query CheckTypeOfLuke { @@ -308,7 +308,7 @@ async def allows_us_to_verify_that_luke_is_a_human(): ) def describe_reporting_errors_raised_in_resolvers(): - @mark.asyncio + @mark.anyio async def correctly_reports_error_on_accessing_secret_backstory(): source = """ query HeroNameQuery { @@ -330,7 +330,7 @@ async def correctly_reports_error_on_accessing_secret_backstory(): ], ) - @mark.asyncio + @mark.anyio async def correctly_reports_error_on_accessing_backstory_in_a_list(): source = """ query HeroNameQuery { @@ -374,7 +374,7 @@ async def correctly_reports_error_on_accessing_backstory_in_a_list(): ], ) - @mark.asyncio + @mark.anyio async def correctly_reports_error_on_accessing_through_an_alias(): source = """ query HeroNameQuery { diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index d5f2ba95..fdbd7f9a 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -4,23 +4,18 @@ operations on a simulated user registry database backend. 
""" -from asyncio import sleep, wait +from anyio import create_task_group, fail_after, sleep from collections import defaultdict from enum import Enum from inspect import isawaitable from typing import Any, Dict, List, NamedTuple, Optional -try: - from asyncio import create_task -except ImportError: # Python < 3.7 - create_task = None # type: ignore - from pytest import fixture, mark from graphql import ( graphql, parse, - subscribe, + subscribe, GraphQLArgument, GraphQLBoolean, GraphQLEnumType, @@ -227,7 +222,7 @@ def context(): def describe_query(): - @mark.asyncio + @mark.anyio async def query_user(context): user = await context["registry"].create( firstName="John", lastName="Doe", tweets=42, verified=True @@ -259,7 +254,7 @@ async def query_user(context): def describe_mutation(): - @mark.asyncio + @mark.anyio async def create_user(context): received = {} @@ -306,7 +301,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.CREATED.value}, } - @mark.asyncio + @mark.anyio async def update_user(context): received = {} @@ -362,7 +357,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value}, } - @mark.asyncio + @mark.anyio async def delete_user(context): received = {} @@ -404,7 +399,7 @@ def receive(msg): def describe_subscription(): - @mark.asyncio + @mark.anyio async def subscribe_to_user_mutations(context): query = """ subscription ($userId: ID!) { @@ -509,12 +504,11 @@ async def receive_all(): if len(received_all) == 6: # pragma: no cover else break - tasks = [ - create_task(task()) if create_task else task() - for task in (mutate_users, receive_one, receive_all) - ] - done, pending = await wait(tasks, timeout=1) - assert not pending + with fail_after(delay=1): + async with create_task_group() as tg: + tg.start_soon(mutate_users) + tg.start_soon(receive_one) + tg.start_soon(receive_all) expected_data: List[Dict[str, Any]] = [ { From 36774ce74a6ed27aa2c58ea5a9bb6b89d23a49b6 Mon Sep 17 00:00:00 2001 From: Stefan Schulz Date: Sat, 2 Apr 2022 18:45:37 +0200 Subject: [PATCH 2/2] all trio and asyncio tests successful for python 3.7, 3.8, 3.9, 3.10 --- poetry.lock | 433 +++++++++++++------- pyproject.toml | 7 +- src/graphql/execution/execute.py | 66 ++- src/graphql/execution/map_async_iterator.py | 63 +-- src/graphql/execution/subscribe.py | 3 + src/graphql/graphql.py | 4 +- src/graphql/pyutils/__init__.py | 3 + src/graphql/pyutils/broadcast_stream.py | 148 +++++++ src/graphql/pyutils/simple_pub_sub.py | 6 + src/graphql/utilities/ast_from_value.py | 2 +- tests/benchmarks/test_execution_async.py | 3 +- tests/execution/test_map_async_iterator.py | 15 +- tests/execution/test_parallel.py | 126 +++++- tests/execution/test_subscribe.py | 42 +- tests/pyutils/test_broadcast_stream.py | 192 +++++++++ tests/pyutils/test_simple_pub_sub.py | 32 +- tests/test_user_registry.py | 286 +++++++------ tox.ini | 6 +- 18 files changed, 1068 insertions(+), 369 deletions(-) create mode 100644 src/graphql/pyutils/broadcast_stream.py create mode 100644 tests/pyutils/test_broadcast_stream.py diff --git a/poetry.lock b/poetry.lock index 4251c585..0a3baa3c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,12 +7,32 @@ optional = false python-versions = "*" [[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" +name = "anyio" +version = "3.5.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.2" + +[package.dependencies] +contextvars = {version = "*", markers = "python_version < \"3.7\""} +dataclasses = {version = "*", markers = "python_version < \"3.7\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "async-generator" +version = "1.10" +description = "Async generators and context managers for Python 3.5+" +category = "main" +optional = false +python-versions = ">=3.5" [[package]] name = "atomicwrites" @@ -26,7 +46,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -47,29 +67,6 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] pytz = ">=2015.7" -[[package]] -name = "black" -version = "20.8b1" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -appdirs = "*" -click = ">=7.1.2" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -mypy-extensions = ">=0.4.3" -pathspec = ">=0.6,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" -typed-ast = ">=1.4.0" -typing-extensions = ">=3.7.4" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] - [[package]] name = "black" version = "22.1.0" @@ -131,6 +128,17 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "cffi" +version = "1.15.0" +description = "Foreign Function Interface for Python calling C code." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "2.0.11" @@ -177,6 +185,17 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "contextvars" +version = "2.4" +description = "PEP 567 Backport" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +immutables = ">=0.9" + [[package]] name = "coverage" version = "6.2" @@ -195,7 +214,7 @@ toml = ["tomli"] name = "dataclasses" version = "0.8" description = "A backport of the dataclasses module for Python 3.6" -category = "dev" +category = "main" optional = false python-versions = ">=3.6, <3.7" @@ -245,7 +264,7 @@ pyflakes = ">=2.4.0,<2.5.0" name = "idna" version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -257,6 +276,20 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "immutables" +version = "0.17" +description = "Immutable Collections" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} + +[package.extras] +test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"] + [[package]] name = "importlib-metadata" version = "4.2.0" @@ -352,6 +385,17 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "outcome" +version = "1.1.0" +description = "Capture the outcome of Python function calls." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +attrs = ">=19.2.0" + [[package]] name = "packaging" version = "21.3" @@ -435,6 +479,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pyflakes" version = "2.4.0" @@ -484,35 +536,6 @@ toml = "*" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] -[[package]] -name = "pytest-asyncio" -version = "0.16.0" -description = "Pytest support for asyncio." -category = "dev" -optional = false -python-versions = ">= 3.6" - -[package.dependencies] -pytest = ">=5.4.0" - -[package.extras] -testing = ["coverage", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-asyncio" -version = "0.17.2" -description = "Pytest support for asyncio" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pytest = ">=6.1.0" -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.8\""} - -[package.extras] -testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)"] - [[package]] name = "pytest-benchmark" version = "3.4.1" @@ -575,14 +598,6 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "regex" -version = "2022.1.18" -description = "Alternative regular expression module, to replace re." 
-category = "dev" -optional = false -python-versions = "*" - [[package]] name = "requests" version = "2.27.1" @@ -609,6 +624,17 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +contextvars = {version = ">=2.1", markers = "python_version < \"3.7\""} + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -617,6 +643,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "sphinx" version = "4.3.2" @@ -773,6 +807,39 @@ virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2, docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] +[[package]] +name = "trio" +version = "0.20.0" +description = "A friendly Python library for async concurrency and I/O" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +async-generator = ">=1.9" +attrs = ">=19.2.0" +cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} +idna = "*" +outcome = "*" +sniffio = "*" +sortedcontainers = "*" + +[[package]] +name = "trio-typing" +version = "0.7.0" +description = "Static type checking support for Trio and related projects" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +mypy-extensions = ">=0.4.2" +trio = ">=0.16.0" +typing-extensions = ">=3.7.4" + +[package.extras] +mypy = ["mypy (>=0.920)"] + [[package]] name = "typed-ast" version = "1.5.2" @@ -834,19 +901,26 @@ python-versions = ">=3.6" docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +[extras] +trio = ["trio"] + [metadata] lock-version = "1.1" -python-versions = "^3.6" -content-hash = "0021159270f76a4837a7240f8c34959323722ff896baf7bfccdf5cf08034f544" +python-versions = "^3.6,>=3.6.2" +content-hash = "cafb9065d6eb88e9584296e6e24631f986c6c773c13700826c9665a66aa2278a" [metadata.files] alabaster = [ {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +anyio = [ + {file = "anyio-3.5.0-py3-none-any.whl", hash = "sha256:b5fa16c5ff93fa1046f2eeb5bbff2dad4d3514d6cda61d02816dba34fa8c3c2e"}, + {file = "anyio-3.5.0.tar.gz", hash = "sha256:a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6"}, +] +async-generator = [ + {file = 
"async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, + {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, @@ -861,7 +935,6 @@ babel = [ {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, ] black = [ - {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, {file = "black-22.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1297c63b9e1b96a3d0da2d85d11cd9bf8664251fd69ddac068b98dc4f34f73b6"}, {file = "black-22.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2ff96450d3ad9ea499fc4c60e425a1439c2120cbbc1ab959ff20f7c76ec7e866"}, {file = "black-22.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e21e1f1efa65a50e3960edd068b6ae6d64ad6235bd8bfea116a03b21836af71"}, @@ -898,6 +971,58 @@ certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] +cffi = [ + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = 
"sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] charset-normalizer = [ {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, @@ -914,6 +1039,9 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +contextvars = [ + {file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, +] coverage = [ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, @@ -991,6 +1119,57 @@ imagesize = [ {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] +immutables = [ + {file = "immutables-0.17-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cab10d65a29b2019fffd7a3924f6965a8f785e7bd409641ce36ab2d3335f88c4"}, + {file = "immutables-0.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f73088c9b8595ddfd45a5658f8cce0cb3ae6e5890458381fccba3ed3035081d4"}, + {file = "immutables-0.17-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ef632832fa1acae6861d83572b866126f9e35706ab6e581ce6b175b3e0b7a3c4"}, + {file = "immutables-0.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0efdcec7b63859b41f794ffa0cd0d6dc87e77d1be4ff0ec23471a3a1e719235f"}, + {file = "immutables-0.17-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eca96f12bc1535657d24eae2c69816d0b22c4a4bc7f4753115e028a137e8dad"}, + {file = "immutables-0.17-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:01a25b1056754aa486afea5471ca348410d77f458477ccb6fa3baf2d3e3ff3d5"}, + {file = "immutables-0.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c41a6648f7355f1241da677c418edae56fdc45af19ad3540ca8a1e7a81606a7a"}, + {file = "immutables-0.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0b578bba11bd8ae55dee9536edf8d82be18463d15d4b4c9827e27eeeb73826bf"}, + {file = "immutables-0.17-cp310-cp310-win32.whl", hash = "sha256:a28682e115191e909673aedb9ccea3377da3a6a929f8bd86982a2a76bdfa89db"}, + {file = "immutables-0.17-cp310-cp310-win_amd64.whl", hash = "sha256:293ddb681502945f29b3065e688a962e191e752320040892316b9dd1e3b9c8c9"}, + {file = "immutables-0.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ec04fc7d9f76f26d82a5d9d1715df0409d0096309828fc46cd1a2067c7fbab95"}, + {file = "immutables-0.17-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f024f25e9fda42251a2b2167668ca70678c19fb3ab6ed509cef0b4b431d0ff73"}, + {file = "immutables-0.17-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b02083b2052cc201ac5cbd38f34a5da21fcd51016cb4ddd1fb43d7dc113eac17"}, + {file = "immutables-0.17-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea32db31afb82d8369e98f85c5b815ff81610a12fbc837830a34388f1b56f080"}, + {file = "immutables-0.17-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:898a9472d1dd3d17f291114395a1be65be035355fc65af0b2c88238f8fbeaa62"}, + {file = "immutables-0.17-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:736dd3d88d44da0ee48804792bd095c01a344c5d1b0f10beeb9ccb3a00b9c19d"}, + {file = "immutables-0.17-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:15ff4139720f79b902f435a25e3c00f9c8adcc41d79bed64b7e51ae36cfe9620"}, + {file = "immutables-0.17-cp36-cp36m-win32.whl", hash = "sha256:4f018a6c4c3689b82f763ad4f84dec6aa91c83981db7f6bafef963f036e5e815"}, + {file = "immutables-0.17-cp36-cp36m-win_amd64.whl", hash = "sha256:d7400a6753b292ac80102ed026efa8da2c3fedd50c443924cbe9b6448d3b19e4"}, + {file = "immutables-0.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f7a6e0380bddb99c46bb3f12ae5eee9a23d6a66d99bbf0fb10fa552f935c2e8d"}, + {file = "immutables-0.17-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7696c42d1f9a16ecda0ee46229848df8706973690b45e8a090d995d647a5ec57"}, + {file = "immutables-0.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:892b6a1619cd8c398fa70302c4cfa9768a694377639330e7a58cc7be111ab23e"}, + {file = "immutables-0.17-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89093d5a85357250b1d5ae218fdcfdbac4097cbb2d8b55004aa7a2ca2a00a09f"}, + {file = "immutables-0.17-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99a8bc6d0623300eb46beea74f7a5061968fb3efc4e072f23f6c0b21c588238d"}, + {file = 
"immutables-0.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:00380474f8e3b4a2eeb06ce694e0e3cb85a144919140a2b3116defb6c1587471"}, + {file = "immutables-0.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:078e3ed63be0ac36523b80bbabbfb1bb57e55009f4efb5650b0e3b3ed569c3f1"}, + {file = "immutables-0.17-cp37-cp37m-win32.whl", hash = "sha256:14905aecc62b318d86045dcf8d35ef2063803d9d331aeccd88958f03caadc7b0"}, + {file = "immutables-0.17-cp37-cp37m-win_amd64.whl", hash = "sha256:3774d403d1570105a1da2e00c38ce3f04065fd1deff04cf998f8d8e946d0ae13"}, + {file = "immutables-0.17-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e5a9caee1b99eccf1447056ae6bda77edd15c357421293e81fa1a4f28e83448a"}, + {file = "immutables-0.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fed1e1baf1de1bc94a0310da29814892064928d7d40ff5a3b86bcd11d5e7cfff"}, + {file = "immutables-0.17-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d7daa340d76747ba5a8f64816b48def74bd4be45a9508073b34fa954d099fba"}, + {file = "immutables-0.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4644c29fe07fb92ba84b26659708e1799fecaaf781214adf13edd8a4d7495a9"}, + {file = "immutables-0.17-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e9ea0e2a31db44fb01617ff875d4c26f962696e1c5ff11ed7767c2d8dedac4"}, + {file = "immutables-0.17-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:64100dfdb29fae2bc84748fff5d66dd6b3997806c717eeb75f7099aeee9b1878"}, + {file = "immutables-0.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5f933e5bf6f2c1afb24bc2fc8bea8b132096a4a6ba54f36be59787981f3e50ff"}, + {file = "immutables-0.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9508a087a47f9f9506adf2fa8383ab14c46a222b57eea8612bc4c2aa9a9550fe"}, + {file = "immutables-0.17-cp38-cp38-win32.whl", hash = "sha256:dfd2c63f15d1e5ea1ed2a05b7c602b5f61a64337415d299df20e103a57ae4906"}, + {file = "immutables-0.17-cp38-cp38-win_amd64.whl", hash = "sha256:301c539660c988c5b24051ccad1e36c040a916f1e58fa3e245e3122fc50dd28d"}, + {file = "immutables-0.17-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:563bc2ddbe75c10faa3b4b0206870653b44a231b97ed23cff8ab8aff503d922d"}, + {file = "immutables-0.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f621ea6130393cd14d0fbd35b306d4dc70bcd0fda550a8cd313db8015e34ca60"}, + {file = "immutables-0.17-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c2d1b16b716bca70345db334dd6a861bf45c46cb11bb1801277f8a9012e864"}, + {file = "immutables-0.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a08e1a80bd8c5df72c2bf0af24a37ceec17e8ffdb850ed5a62d0bba1d4d86018"}, + {file = "immutables-0.17-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b99155ad112149d43208c611c6c42f19e16716526dacc0fcc16736d2f5d2e20"}, + {file = "immutables-0.17-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ed71e736f8fb82545d00c8969dbc167547c15e85729058edbed3c03b94fca86c"}, + {file = "immutables-0.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:19e4b8e5810dd7cab63fa700373f787a369d992166eabc23f4b962e5704d33c5"}, + {file = "immutables-0.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:305062012497d4c4a70fe35e20cef2c6f65744e721b04671092a63354799988d"}, + {file = "immutables-0.17-cp39-cp39-win32.whl", hash = 
"sha256:f5c6bd012384a8d6af7bb25675719214d76640fe6c336e2b5fba9eef1407ae6a"}, + {file = "immutables-0.17-cp39-cp39-win_amd64.whl", hash = "sha256:615ab26873a794559ccaf4e0e9afdb5aefad0867c15262ba64a55a12a5a41573"}, + {file = "immutables-0.17.tar.gz", hash = "sha256:ad894446355b6f5289a9c84fb46f7c47c6ef2b1bfbdd2be6cb177dbb7f1587ad"}, +] importlib-metadata = [ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, @@ -1073,6 +1252,10 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] +outcome = [ + {file = "outcome-1.1.0-py2.py3-none-any.whl", hash = "sha256:c7dd9375cfd3c12db9801d080a3b63d4b0a261aa996c4c13152380587288d958"}, + {file = "outcome-1.1.0.tar.gz", hash = "sha256:e862f01d4e626e63e8f92c38d1f8d5546d3f9cce989263c521b2e7990d186967"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, @@ -1104,6 +1287,10 @@ pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, @@ -1120,12 +1307,6 @@ pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] -pytest-asyncio = [ - {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, - {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, - {file = "pytest-asyncio-0.17.2.tar.gz", hash = "sha256:6d895b02432c028e6957d25fc936494e78c6305736e785d9fee408b1efbc7ff4"}, - {file = "pytest_asyncio-0.17.2-py3-none-any.whl", hash = "sha256:e0fe5dbea40516b661ef1bcfe0bd9461c2847c4ef4bb40012324f2454fb7d56d"}, -] pytest-benchmark = [ {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, @@ -1146,82 +1327,6 @@ pytz = [ {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, {file = "pytz-2021.3.tar.gz", 
hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] -regex = [ - {file = "regex-2022.1.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:34316bf693b1d2d29c087ee7e4bb10cdfa39da5f9c50fa15b07489b4ab93a1b5"}, - {file = "regex-2022.1.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a0b9f6a1a15d494b35f25ed07abda03209fa76c33564c09c9e81d34f4b919d7"}, - {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f99112aed4fb7cee00c7f77e8b964a9b10f69488cdff626ffd797d02e2e4484f"}, - {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a2bf98ac92f58777c0fafc772bf0493e67fcf677302e0c0a630ee517a43b949"}, - {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8618d9213a863c468a865e9d2ec50221015f7abf52221bc927152ef26c484b4c"}, - {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b52cc45e71657bc4743a5606d9023459de929b2a198d545868e11898ba1c3f59"}, - {file = "regex-2022.1.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e12949e5071c20ec49ef00c75121ed2b076972132fc1913ddf5f76cae8d10b4"}, - {file = "regex-2022.1.18-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b02e3e72665cd02afafb933453b0c9f6c59ff6e3708bd28d0d8580450e7e88af"}, - {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:abfcb0ef78df0ee9df4ea81f03beea41849340ce33a4c4bd4dbb99e23ec781b6"}, - {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6213713ac743b190ecbf3f316d6e41d099e774812d470422b3a0f137ea635832"}, - {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:61ebbcd208d78658b09e19c78920f1ad38936a0aa0f9c459c46c197d11c580a0"}, - {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b013f759cd69cb0a62de954d6d2096d648bc210034b79b1881406b07ed0a83f9"}, - {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9187500d83fd0cef4669385cbb0961e227a41c0c9bc39219044e35810793edf7"}, - {file = "regex-2022.1.18-cp310-cp310-win32.whl", hash = "sha256:94c623c331a48a5ccc7d25271399aff29729fa202c737ae3b4b28b89d2b0976d"}, - {file = "regex-2022.1.18-cp310-cp310-win_amd64.whl", hash = "sha256:1a171eaac36a08964d023eeff740b18a415f79aeb212169080c170ec42dd5184"}, - {file = "regex-2022.1.18-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:49810f907dfe6de8da5da7d2b238d343e6add62f01a15d03e2195afc180059ed"}, - {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2f5c3f7057530afd7b739ed42eb04f1011203bc5e4663e1e1d01bb50f813e3"}, - {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85ffd6b1cb0dfb037ede50ff3bef80d9bf7fa60515d192403af6745524524f3b"}, - {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba37f11e1d020969e8a779c06b4af866ffb6b854d7229db63c5fdddfceaa917f"}, - {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e27ea1ebe4a561db75a880ac659ff439dec7f55588212e71700bb1ddd5af9"}, - {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37978254d9d00cda01acc1997513f786b6b971e57b778fbe7c20e30ae81a97f3"}, - {file = 
"regex-2022.1.18-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54a1eb9fd38f2779e973d2f8958fd575b532fe26013405d1afb9ee2374e7ab8"}, - {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:768632fd8172ae03852e3245f11c8a425d95f65ff444ce46b3e673ae5b057b74"}, - {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:de2923886b5d3214be951bc2ce3f6b8ac0d6dfd4a0d0e2a4d2e5523d8046fdfb"}, - {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1333b3ce73269f986b1fa4d5d395643810074dc2de5b9d262eb258daf37dc98f"}, - {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:d19a34f8a3429bd536996ad53597b805c10352a8561d8382e05830df389d2b43"}, - {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d2f355a951f60f0843f2368b39970e4667517e54e86b1508e76f92b44811a8a"}, - {file = "regex-2022.1.18-cp36-cp36m-win32.whl", hash = "sha256:2245441445099411b528379dee83e56eadf449db924648e5feb9b747473f42e3"}, - {file = "regex-2022.1.18-cp36-cp36m-win_amd64.whl", hash = "sha256:25716aa70a0d153cd844fe861d4f3315a6ccafce22b39d8aadbf7fcadff2b633"}, - {file = "regex-2022.1.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7e070d3aef50ac3856f2ef5ec7214798453da878bb5e5a16c16a61edf1817cc3"}, - {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22709d701e7037e64dae2a04855021b62efd64a66c3ceed99dfd684bfef09e38"}, - {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9099bf89078675c372339011ccfc9ec310310bf6c292b413c013eb90ffdcafc"}, - {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04611cc0f627fc4a50bc4a9a2e6178a974c6a6a4aa9c1cca921635d2c47b9c87"}, - {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:552a39987ac6655dad4bf6f17dd2b55c7b0c6e949d933b8846d2e312ee80005a"}, - {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e031899cb2bc92c0cf4d45389eff5b078d1936860a1be3aa8c94fa25fb46ed8"}, - {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2dacb3dae6b8cc579637a7b72f008bff50a94cde5e36e432352f4ca57b9e54c4"}, - {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e5c31d70a478b0ca22a9d2d76d520ae996214019d39ed7dd93af872c7f301e52"}, - {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb804c7d0bfbd7e3f33924ff49757de9106c44e27979e2492819c16972ec0da2"}, - {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:36b2d700a27e168fa96272b42d28c7ac3ff72030c67b32f37c05616ebd22a202"}, - {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:16f81025bb3556eccb0681d7946e2b35ff254f9f888cff7d2120e8826330315c"}, - {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:da80047524eac2acf7c04c18ac7a7da05a9136241f642dd2ed94269ef0d0a45a"}, - {file = "regex-2022.1.18-cp37-cp37m-win32.whl", hash = "sha256:6ca45359d7a21644793de0e29de497ef7f1ae7268e346c4faf87b421fea364e6"}, - {file = "regex-2022.1.18-cp37-cp37m-win_amd64.whl", hash = "sha256:38289f1690a7e27aacd049e420769b996826f3728756859420eeee21cc857118"}, - {file = "regex-2022.1.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:6014038f52b4b2ac1fa41a58d439a8a00f015b5c0735a0cd4b09afe344c94899"}, - {file = "regex-2022.1.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b5d6f9aed3153487252d00a18e53f19b7f52a1651bc1d0c4b5844bc286dfa52"}, - {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d24b03daf7415f78abc2d25a208f234e2c585e5e6f92f0204d2ab7b9ab48e3"}, - {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf594cc7cc9d528338d66674c10a5b25e3cde7dd75c3e96784df8f371d77a298"}, - {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd914db437ec25bfa410f8aa0aa2f3ba87cdfc04d9919d608d02330947afaeab"}, - {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b6840b6448203228a9d8464a7a0d99aa8fa9f027ef95fe230579abaf8a6ee1"}, - {file = "regex-2022.1.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11772be1eb1748e0e197a40ffb82fb8fd0d6914cd147d841d9703e2bef24d288"}, - {file = "regex-2022.1.18-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a602bdc8607c99eb5b391592d58c92618dcd1537fdd87df1813f03fed49957a6"}, - {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7e26eac9e52e8ce86f915fd33380f1b6896a2b51994e40bb094841e5003429b4"}, - {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:519c0b3a6fbb68afaa0febf0d28f6c4b0a1074aefc484802ecb9709faf181607"}, - {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3c7ea86b9ca83e30fa4d4cd0eaf01db3ebcc7b2726a25990966627e39577d729"}, - {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:51f02ca184518702975b56affde6c573ebad4e411599005ce4468b1014b4786c"}, - {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:385ccf6d011b97768a640e9d4de25412204fbe8d6b9ae39ff115d4ff03f6fe5d"}, - {file = "regex-2022.1.18-cp38-cp38-win32.whl", hash = "sha256:1f8c0ae0a0de4e19fddaaff036f508db175f6f03db318c80bbc239a1def62d02"}, - {file = "regex-2022.1.18-cp38-cp38-win_amd64.whl", hash = "sha256:760c54ad1b8a9b81951030a7e8e7c3ec0964c1cb9fee585a03ff53d9e531bb8e"}, - {file = "regex-2022.1.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:93c20777a72cae8620203ac11c4010365706062aa13aaedd1a21bb07adbb9d5d"}, - {file = "regex-2022.1.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6aa427c55a0abec450bca10b64446331b5ca8f79b648531138f357569705bc4a"}, - {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38baee6bdb7fe1b110b6b3aaa555e6e872d322206b7245aa39572d3fc991ee4"}, - {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:752e7ddfb743344d447367baa85bccd3629c2c3940f70506eb5f01abce98ee68"}, - {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8acef4d8a4353f6678fd1035422a937c2170de58a2b29f7da045d5249e934101"}, - {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c73d2166e4b210b73d1429c4f1ca97cea9cc090e5302df2a7a0a96ce55373f1c"}, - {file = "regex-2022.1.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24c89346734a4e4d60ecf9b27cac4c1fee3431a413f7aa00be7c4d7bbacc2c4d"}, - {file = 
"regex-2022.1.18-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:596f5ae2eeddb79b595583c2e0285312b2783b0ec759930c272dbf02f851ff75"}, - {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ecfe51abf7f045e0b9cdde71ca9e153d11238679ef7b5da6c82093874adf3338"}, - {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1d6301f5288e9bdca65fab3de6b7de17362c5016d6bf8ee4ba4cbe833b2eda0f"}, - {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:93cce7d422a0093cfb3606beae38a8e47a25232eea0f292c878af580a9dc7605"}, - {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cf0db26a1f76aa6b3aa314a74b8facd586b7a5457d05b64f8082a62c9c49582a"}, - {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:defa0652696ff0ba48c8aff5a1fac1eef1ca6ac9c660b047fc8e7623c4eb5093"}, - {file = "regex-2022.1.18-cp39-cp39-win32.whl", hash = "sha256:6db1b52c6f2c04fafc8da17ea506608e6be7086715dab498570c3e55e4f8fbd1"}, - {file = "regex-2022.1.18-cp39-cp39-win_amd64.whl", hash = "sha256:ebaeb93f90c0903233b11ce913a7cb8f6ee069158406e056f884854c737d2442"}, - {file = "regex-2022.1.18.tar.gz", hash = "sha256:97f32dc03a8054a4c4a5ab5d761ed4861e828b2c200febd4e46857069a483916"}, -] requests = [ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, @@ -1230,10 +1335,18 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] +sortedcontainers = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] sphinx = [ {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, @@ -1278,6 +1391,14 @@ tox = [ {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"}, {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"}, ] +trio = [ + {file = "trio-0.20.0-py3-none-any.whl", hash = "sha256:fb2d48e4eab0dfb786a472cd514aaadc71e3445b203bc300bad93daa75d77c1a"}, + {file = "trio-0.20.0.tar.gz", hash = "sha256:670a52d3115d0e879e1ac838a4eb999af32f858163e3a704fe4839de2a676070"}, +] +trio-typing = [ + {file = "trio-typing-0.7.0.tar.gz", hash = 
"sha256:5bb2184de144c15f2cc252bba4fd167125758df7339c4f7bc40538940aefa3b9"}, + {file = "trio_typing-0.7.0-py3-none-any.whl", hash = "sha256:156ba760f444aa2f8af43f4459d462415fc297234feb27018e4e902bb62a122b"}, +] typed-ast = [ {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, diff --git a/pyproject.toml b/pyproject.toml index 1e835183..b84382bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ packages = [ python = "^3.6,>=3.6.2" typing-extensions = { version = "^4.0", python = "<3.8" } anyio = "^3.5.0" +trio = { version = "^0.20.0", python = ">=3.7", optional=true} [tool.poetry.dev-dependencies] pytest = "^6.2" @@ -63,7 +64,11 @@ sphinx_rtd_theme = ">=1,<2" check-manifest = ">=0.47,<1" bump2version = ">=1.0,<2" tox = "^3.24" -trio = { version = "^0.20.0", python = ">=3.7"} +trio-typing = "^0.7.0" + +[tool.poetry.extras] +trio = ["trio"] + [tool.black] target-version = ['py36', 'py37', 'py38', 'py39', 'py310'] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 01ec288a..d36ee742 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1,4 +1,6 @@ -from asyncio import ensure_future, gather +from anyio import create_task_group, ExceptionGroup +from asyncio import ensure_future, CancelledError +from sniffio import current_async_library from collections.abc import Mapping from inspect import isawaitable from typing import ( @@ -444,13 +446,16 @@ def execute_fields( # field, which is possibly a coroutine object. Return a coroutine object that # will yield this same map, but with any coroutines awaited in parallel and # replaced with the values they yielded. 
+ async def await_field(field: str) -> None: + results[field] = await results[field] + async def get_results() -> Dict[str, Any]: - results.update( - zip( - awaitable_fields, - await gather(*(results[field] for field in awaitable_fields)), - ) - ) + try: + async with create_task_group() as tg: + for field in awaitable_fields: + tg.start_soon(await_field, field) + except ExceptionGroup as exc: + raise exc.exceptions[0] return results return get_results() @@ -531,6 +536,10 @@ async def await_result() -> Any: return await completed return completed except Exception as raw_error: + if isinstance( + raw_error, CancelledError + ): # pragma: no cover (Python >= 3.8) + raise error = located_error(raw_error, field_nodes, path.as_list()) self.handle_field_error(error, return_type) return None @@ -546,6 +555,10 @@ async def await_completed() -> Any: try: return await completed except Exception as raw_error: + if isinstance( + raw_error, CancelledError + ): # pragma: no cover (Python >= 3.8) + raise error = located_error(raw_error, field_nodes, path.as_list()) self.handle_field_error(error, return_type) return None @@ -714,6 +727,10 @@ async def await_completed(item: Any, item_path: Path) -> Any: return await completed return completed except Exception as raw_error: + if isinstance( + raw_error, CancelledError + ): # pragma: no cover (Python >= 3.8) + raise error = located_error( raw_error, field_nodes, item_path.as_list() ) @@ -732,6 +749,10 @@ async def await_completed(item: Any, item_path: Path) -> Any: try: return await item except Exception as raw_error: + if isinstance( + raw_error, CancelledError + ): # pragma: no cover (Python >= 3.8) + raise error = located_error( raw_error, field_nodes, item_path.as_list() ) @@ -752,14 +773,16 @@ async def await_completed(item: Any, item_path: Path) -> Any: return completed_results # noinspection PyShadowingNames + async def await_index(index: int) -> None: + completed_results[index] = await completed_results[index] + async def get_completed_results() -> List[Any]: - for index, result in zip( - awaitable_indices, - await gather( - *(completed_results[index] for index in awaitable_indices) - ), - ): - completed_results[index] = result + try: + async with create_task_group() as tg: + for index in awaitable_indices: + tg.start_soon(await_index, index) + except ExceptionGroup as exc: + raise exc.exceptions[0] return completed_results return get_completed_results() @@ -1096,7 +1119,8 @@ def execute_sync( # Assert that the execution was synchronous. 
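# Background for the CancelledError re-raise guards in this file (a standalone
# note, not part of the patch): before Python 3.8, asyncio.CancelledError was a
# subclass of Exception, so the bare `except Exception` handlers would swallow
# task cancellation; explicitly re-raising it keeps cancellation propagating on
# those interpreters, while on 3.8+ the branch is unreachable (hence the
# "no cover" pragmas).
import asyncio
import sys

if sys.version_info >= (3, 8):
    assert not issubclass(asyncio.CancelledError, Exception)
else:
    assert issubclass(asyncio.CancelledError, Exception)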
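# A small standalone sketch (not part of the patch) of sniffio's
# current_async_library(), which the synchronous-execution check just below
# relies on: the stray coroutine is only wrapped and cancelled via
# ensure_future() when asyncio really is the running backend. Names here are
# illustrative only.
import anyio
import sniffio


async def report_backend() -> str:
    # "asyncio" or "trio", depending on the event loop driving this coroutine
    return sniffio.current_async_library()


def demo() -> None:
    assert anyio.run(report_backend, backend="asyncio") == "asyncio"
    # with trio installed, the same coroutine reports "trio":
    #     anyio.run(report_backend, backend="trio") == "trio"
    # outside any event loop, current_async_library() raises
    # sniffio.AsyncLibraryNotFoundError instead of returning a value


if __name__ == "__main__":
    demo()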
if isawaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + if current_async_library() == "asyncio": + ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() raise RuntimeError("GraphQL execution failed to complete synchronously.") return cast(ExecutionResult, result) @@ -1216,8 +1240,18 @@ def default_type_resolver( if awaitable_is_type_of_results: # noinspection PyShadowingNames + is_type_of_results = [None for _ in awaitable_is_type_of_results] + + async def await_is_type_of_result(index: int) -> None: + is_type_of_results[index] = await awaitable_is_type_of_results[index] + async def get_type() -> Optional[str]: - is_type_of_results = await gather(*awaitable_is_type_of_results) + try: + async with create_task_group() as tg: + for index in range(len(awaitable_is_type_of_results)): + tg.start_soon(await_is_type_of_result, index) + except ExceptionGroup as exc: + raise exc.exceptions[0] for is_type_of_result, type_ in zip(is_type_of_results, awaitable_types): if is_type_of_result: return type_.name diff --git a/src/graphql/execution/map_async_iterator.py b/src/graphql/execution/map_async_iterator.py index 43400fd3..87f9662c 100644 --- a/src/graphql/execution/map_async_iterator.py +++ b/src/graphql/execution/map_async_iterator.py @@ -1,7 +1,12 @@ -from asyncio import CancelledError, Event, Task, ensure_future, wait -from concurrent.futures import FIRST_COMPLETED +from anyio import ( + get_cancelled_exc_class, + create_task_group, + Event, + CancelScope, +) +from anyio.abc import TaskGroup from inspect import isasyncgen, isawaitable -from typing import cast, Any, AsyncIterable, Callable, Optional, Set, Type, Union +from typing import cast, Any, AsyncIterable, Callable, Optional, Type, Union from types import TracebackType __all__ = ["MapAsyncIterator"] @@ -27,6 +32,10 @@ def __aiter__(self) -> "MapAsyncIterator": """Get the iterator object.""" return self + async def _wait_for_close(self, tg: TaskGroup) -> None: + await self._close_event.wait() + tg.cancel_scope.cancel() + async def __anext__(self) -> Any: """Get the next value of the iterator.""" if self.is_closed: @@ -34,31 +43,29 @@ async def __anext__(self) -> Any: raise StopAsyncIteration value = await self.iterator.__anext__() else: - aclose = ensure_future(self._close_event.wait()) - anext = ensure_future(self.iterator.__anext__()) - + close_evt = None + iterator_exc = None try: - pending: Set[Task] = ( - await wait([aclose, anext], return_when=FIRST_COMPLETED) - )[1] - except CancelledError: - # cancel underlying tasks and close - aclose.cancel() - anext.cancel() - await self.aclose() - raise # re-raise the cancellation - - for task in pending: - task.cancel() - - if aclose.done(): + async with create_task_group() as tg: + # we need to store the current event, it could be reset + close_evt = self._close_event + tg.start_soon(self._wait_for_close, tg) + try: + value = await self.iterator.__anext__() + except BaseException as exc: + iterator_exc = exc + tg.cancel_scope.cancel() + except BaseException: + # We ignore this and use the iterator exception (if any) + pass + if close_evt is not None and close_evt.is_set(): + # closed from outside via `is_closed=True / aclose` raise StopAsyncIteration - - error = anext.exception() - if error: - raise error - - value = anext.result() + if iterator_exc is not None: + if isinstance(iterator_exc, get_cancelled_exc_class()): + with CancelScope(shield=True): + await self.aclose() + raise iterator_exc result = self.callback(value) @@ -111,5 +118,5 @@ def 
is_closed(self, value: bool) -> None: """Mark the iterator as closed.""" if value: self._close_event.set() - else: - self._close_event.clear() + elif self._close_event.is_set(): + self._close_event = Event() diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index 21fe4db3..c557ee9c 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -1,3 +1,4 @@ +from asyncio import CancelledError from inspect import isawaitable from typing import ( Any, @@ -209,4 +210,6 @@ async def execute_subscription(context: ExecutionContext) -> AsyncIterable[Any]: return event_stream except Exception as error: + if isinstance(error, CancelledError): # pragma: no cover (Python >= 3.8) + raise raise located_error(error, field_nodes, path.as_list()) diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index c2e804cd..85fd22b8 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -1,4 +1,5 @@ from asyncio import ensure_future +from sniffio import current_async_library from inspect import isawaitable from typing import Any, Awaitable, Callable, Dict, Optional, Union, Type, cast @@ -143,7 +144,8 @@ def graphql_sync( # Assert that the execution was synchronous. if isawaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + if current_async_library() == "asyncio": + ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() raise RuntimeError("GraphQL execution failed to complete synchronously.") return cast(ExecutionResult, result) diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index c156de41..9cb4b162 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -32,6 +32,7 @@ from .path import Path from .print_path_list import print_path_list from .simple_pub_sub import SimplePubSub, SimplePubSubIterator +from .broadcast_stream import MemoryObjectBroadcastStream, create_broadcast_stream from .undefined import Undefined, UndefinedType __all__ = [ @@ -60,6 +61,8 @@ "print_path_list", "SimplePubSub", "SimplePubSubIterator", + "MemoryObjectBroadcastStream", + "create_broadcast_stream", "Undefined", "UndefinedType", ] diff --git a/src/graphql/pyutils/broadcast_stream.py b/src/graphql/pyutils/broadcast_stream.py new file mode 100644 index 00000000..9bab0463 --- /dev/null +++ b/src/graphql/pyutils/broadcast_stream.py @@ -0,0 +1,148 @@ +from dataclasses import dataclass, field +from typing import Any, List, Optional, Type, Callable +from types import TracebackType + +import anyio +from anyio.streams.memory import MemoryObjectSendStream, MemoryObjectReceiveStream + +__all__ = ["MemoryObjectBroadcastStream", "create_broadcast_stream"] + + +def create_broadcast_stream( + max_buffer_size: float, requires_listeners: bool = False +) -> "MemoryObjectBroadcastStream": + return MemoryObjectBroadcastStream( + BroadcastStreamState(max_buffer_size, requires_listeners) + ) + + +@dataclass +class BroadcastStreamListener: + stream: MemoryObjectSendStream + transform: Optional[Callable[[Any], Any]] = None + + +@dataclass +class BroadcastStreamState: + max_buffer_size: float + requires_listeners: bool + listeners: List[BroadcastStreamListener] = field(default_factory=list) + ref_counter: int = 0 + + +class MemoryObjectBroadcastStream: + _state: BroadcastStreamState + _closed: bool + + def __init__(self, state: BroadcastStreamState): + self._state = state + self._closed = False + self._state.ref_counter += 1 + + async def aclose(self) -> None: + self.close() + + 
def close(self) -> None: + if not self._closed: + self._closed = True + self._state.ref_counter -= 1 + if self._state.ref_counter == 0: + for listener in self._state.listeners: + listener.stream.close() + self._state.listeners = [] + + def clone(self) -> "MemoryObjectBroadcastStream": + if self._closed: + raise anyio.ClosedResourceError + return MemoryObjectBroadcastStream(self._state) + + async def _send_to_listener( + self, listener: BroadcastStreamListener, item: Any + ) -> None: + try: + if listener.transform is not None: + item = listener.transform(item) + await listener.stream.send(item) + except (anyio.ClosedResourceError, anyio.BrokenResourceError): + with anyio.CancelScope(shield=True): + await listener.stream.aclose() + self._state.listeners.remove(listener) + + async def send(self, item: Any) -> bool: + if self._closed: + raise anyio.ClosedResourceError + async with anyio.create_task_group() as tg: + for listener in self._state.listeners: + tg.start_soon(self._send_to_listener, listener, item) + if not self._state.listeners: + if self._state.requires_listeners: + raise anyio.BrokenResourceError + return False + return True + + def send_nowait(self, item: Any) -> bool: + if self._closed: + raise anyio.ClosedResourceError + for listener in list(self._state.listeners): + stats = listener.stream.statistics() + if stats.open_receive_streams == 0: + listener.stream.close() + self._state.listeners.remove(listener) + # We raise WouldBlock before sending anything + if ( + stats.max_buffer_size <= stats.current_buffer_used + and not stats.tasks_waiting_send + ): + raise anyio.WouldBlock + + for listener in self._state.listeners: + try: + if listener.transform is not None: + listener.stream.send_nowait(listener.transform(item)) + else: + listener.stream.send_nowait(item) + except ( + anyio.ClosedResourceError, + anyio.BrokenResourceError, + ): # pragma: no cover + # we checked all listeners beforehand, this should not happen + listener.stream.close() + self._state.listeners.remove(listener) + except anyio.WouldBlock: # pragma: no cover + assert ( + False # we checked all listeners beforehand, this should not happen + ) + if not self._state.listeners: + if self._state.requires_listeners: + raise anyio.BrokenResourceError + return False + return True + + def get_listener( + self, transform: Optional[Callable[[Any], Any]] = None + ) -> MemoryObjectReceiveStream: + send, receive = anyio.create_memory_object_stream(self._state.max_buffer_size) + self._state.listeners.append(BroadcastStreamListener(send, transform)) + return receive + + def __enter__(self) -> "MemoryObjectBroadcastStream": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + async def __aenter__(self) -> "MemoryObjectBroadcastStream": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 6b1ba050..e44b75e7 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -17,6 +17,8 @@ class SimplePubSub: Creates an AsyncIterator from an EventEmitter. Useful for mocking a PubSub system for tests. + + Warning: Only works with an asyncio event loop. 
""" subscribers: Set[Callable] @@ -39,6 +41,10 @@ def get_subscriber( class SimplePubSubIterator(AsyncIterator): + """ + Warning: Only works with an asyncio event loop. + """ + def __init__(self, pubsub: SimplePubSub, transform: Optional[Callable]) -> None: self.pubsub = pubsub self.transform = transform diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index 0b3413cb..208d9d95 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -118,7 +118,7 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: return IntValueNode(value=str(serialized)) if isinstance(serialized, float) and isfinite(serialized): value = str(serialized) - if value.endswith('.0'): + if value.endswith(".0"): value = value[:-2] return FloatValueNode(value=value) diff --git a/tests/benchmarks/test_execution_async.py b/tests/benchmarks/test_execution_async.py index 87b0cb87..1aa42d9b 100644 --- a/tests/benchmarks/test_execution_async.py +++ b/tests/benchmarks/test_execution_async.py @@ -64,7 +64,8 @@ def test_execute_basic_async(anyio_backend, benchmark): @mark.parametrize("anyio_backend", ["trio"]) def test_execute_basic_async_trio(anyio_backend, benchmark): - # TODO: can the trio loop be started beforehand? run benchmark in async function somehow? + # TODO: can the trio loop be started beforehand? + # Can the benchmark be run in an async function somehow? import trio result = benchmark(lambda: trio.run(graphql, schema, "query { user { id, name }}")) diff --git a/tests/execution/test_map_async_iterator.py b/tests/execution/test_map_async_iterator.py index 43f3d8dc..d8a89d4d 100644 --- a/tests/execution/test_map_async_iterator.py +++ b/tests/execution/test_map_async_iterator.py @@ -401,9 +401,9 @@ async def set_done(): # Unblock and watch StopAsyncIteration propagate try: await doubles.aclose() - except: - assert False # ensure that aclose does not raise a StopAsyncIteration - + except StopAsyncIteration: # pragma: no cover + assert False # other exceptions would fail the test anyway + assert done with raises(StopAsyncIteration): @@ -448,6 +448,10 @@ async def aclose(self): iterator.is_closed = False doubles.is_closed = False assert not doubles.is_closed + # ensure is_closed=False is idempotent + close_evt = doubles._close_event + doubles.is_closed = False + assert close_evt == doubles._close_event assert await anext(doubles) == 6 assert not doubles.is_closed @@ -461,6 +465,8 @@ async def aclose(self): @mark.anyio async def can_cancel_async_iterator_while_waiting(): + is_waiting = Event() + class Iterator: def __init__(self): self.is_closed = False @@ -471,6 +477,7 @@ def __aiter__(self): async def __anext__(self): try: + is_waiting.set() await sleep(0.5) return self.value # pragma: no cover except get_cancelled_exc_class(): @@ -494,7 +501,7 @@ async def iterator_task(): async with create_task_group() as tg: tg.start_soon(iterator_task) - await sleep(0.05) + await is_waiting.wait() assert not cancelled assert not doubles.is_closed assert iterator.value == 1 diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index fc74025b..8f591ee8 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -55,12 +55,40 @@ async def resolve(*_args): # raises TimeoutError if not parallel awaitable_result = execute(schema, ast) assert isinstance(awaitable_result, Awaitable) - + with anyio.fail_after(1.0): result = await awaitable_result assert result == ({"foo": True, 
"bar": True}, None) + @mark.anyio + async def cancel_resolve_fields_in_parallel(): + ast = parse("{foo, bar}") + + async def resolve(*_args): + return await anyio.sleep(5) + + schema = GraphQLSchema( + GraphQLObjectType( + "Type", + { + "foo": GraphQLField(GraphQLString, resolve=resolve), + "bar": GraphQLField(GraphQLString, resolve=resolve), + }, + ) + ) + + awaitable_result = execute(schema, ast) + assert isinstance(awaitable_result, Awaitable) + cancelled = False + with anyio.move_on_after(0.1): + try: + await awaitable_result + except anyio.get_cancelled_exc_class(): + cancelled = True + raise + assert cancelled + @mark.anyio async def resolve_list_in_parallel(): barrier = Barrier(2) @@ -92,6 +120,38 @@ async def resolve_list(*args): assert result == ({"foo": [True, True]}, None) + @mark.anyio + async def cancel_resolve_list_in_parallel(): + async def resolve(*_args): + return await anyio.sleep(5) + + async def resolve_list(*args): + return [resolve(*args), resolve(*args)] + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "foo": GraphQLField( + GraphQLList(GraphQLBoolean), resolve=resolve_list + ) + }, + ) + ) + + ast = parse("{foo}") + + awaitable_result = execute(schema, ast) + assert isinstance(awaitable_result, Awaitable) + cancelled = False + with anyio.move_on_after(0.1): + try: + await awaitable_result + except anyio.get_cancelled_exc_class(): + cancelled = True + raise + assert cancelled + @mark.anyio async def resolve_is_type_of_in_parallel(): FooType = GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) @@ -158,3 +218,67 @@ async def is_type_of_baz(obj, *_args): {"foo": [{"foo": "bar", "foobar": 1}, {"foo": "baz", "foobaz": 2}]}, None, ) + + @mark.anyio + async def cancel_resolve_is_type_of_in_parallel(): + FooType = GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) + + async def is_type_of_bar(obj, *_args): + await anyio.sleep(5) + + BarType = GraphQLObjectType( + "Bar", + {"foo": GraphQLField(GraphQLString), "foobar": GraphQLField(GraphQLInt)}, + interfaces=[FooType], + is_type_of=is_type_of_bar, + ) + + async def is_type_of_baz(obj, *_args): + await anyio.sleep(5) + + BazType = GraphQLObjectType( + "Baz", + {"foo": GraphQLField(GraphQLString), "foobaz": GraphQLField(GraphQLInt)}, + interfaces=[FooType], + is_type_of=is_type_of_baz, + ) + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "foo": GraphQLField( + GraphQLList(FooType), + resolve=lambda *_args: [ + {"foo": "bar", "foobar": 1}, + {"foo": "baz", "foobaz": 2}, + ], + ) + }, + ), + types=[BarType, BazType], + ) + + ast = parse( + """ + { + foo { + foo + ... on Bar { foobar } + ... 
on Baz { foobaz } + } + } + """ + ) + + # raises TimeoutError if not parallel + awaitable_result = execute(schema, ast) + assert isinstance(awaitable_result, Awaitable) + cancelled = False + with anyio.move_on_after(0.1): + try: + await awaitable_result + except anyio.get_cancelled_exc_class(): + cancelled = True + raise + assert cancelled diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 1b7d678f..533fdbce 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,11 +1,11 @@ import anyio - +import math from typing import Any, Dict, List, Callable from pytest import mark, raises from graphql.language import parse -from graphql.pyutils import SimplePubSub +from graphql.pyutils import MemoryObjectBroadcastStream, create_broadcast_stream from graphql.type import ( GraphQLArgument, GraphQLBoolean, @@ -76,7 +76,7 @@ async def anext(iterator): ) -def create_subscription(pubsub: SimplePubSub): +def create_subscription(pubsub: MemoryObjectBroadcastStream): document = parse( """ subscription ($priority: Int = 0) { @@ -110,7 +110,7 @@ def transform(new_email): data: Dict[str, Any] = { "inbox": {"emails": emails}, - "importantEmail": pubsub.get_subscriber(transform), + "importantEmail": pubsub.get_listener(transform), } return subscribe(email_schema, document, data) @@ -466,7 +466,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): def describe_subscription_publish_phase(): @mark.anyio async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): - pubsub = SimplePubSub() + pubsub = create_broadcast_stream(math.inf) subscription = await create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) @@ -478,7 +478,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): payload2 = anext(second_subscription) assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright", @@ -501,7 +501,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): @mark.anyio async def produces_a_payload_per_subscription_event(): - pubsub = SimplePubSub() + pubsub = create_broadcast_stream(math.inf) subscription = await create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) @@ -510,7 +510,7 @@ async def produces_a_payload_per_subscription_event(): # A new email arrives! assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright", @@ -534,7 +534,7 @@ async def produces_a_payload_per_subscription_event(): # Another new email arrives, before anext(subscription) is called. assert ( - pubsub.emit( + await pubsub.send( { "from": "hyo@graphql.org", "subject": "Tools", @@ -562,7 +562,7 @@ async def produces_a_payload_per_subscription_event(): # Which may result in disconnecting upstream services as well. assert ( - pubsub.emit( + await pubsub.send( { "from": "adam@graphql.org", "subject": "Important", @@ -579,7 +579,7 @@ async def produces_a_payload_per_subscription_event(): @mark.anyio async def produces_a_payload_when_there_are_multiple_events(): - pubsub = SimplePubSub() + pubsub = create_broadcast_stream(math.inf) subscription = await create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) @@ -587,7 +587,7 @@ async def produces_a_payload_when_there_are_multiple_events(): # A new email arrives! 
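# A compact standalone sketch (not part of the patch) of the broadcast stream
# that replaces SimplePubSub in these subscription tests: a single send() fans
# the payload out to every listener, optionally piped through a per-listener
# transform, which is how create_subscription() above feeds importantEmail.
import math

import anyio
from graphql.pyutils import create_broadcast_stream


async def demo_fan_out() -> None:
    pubsub = create_broadcast_stream(math.inf)
    plain = pubsub.get_listener()
    shouted = pubsub.get_listener(transform=str.upper)

    assert await pubsub.send("hello") is True  # True: at least one listener received it
    assert await plain.receive() == "hello"
    assert await shouted.receive() == "HELLO"

    await pubsub.aclose()  # closes all listener streams once every clone is closed


if __name__ == "__main__":
    anyio.run(demo_fan_out)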
assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright", @@ -612,7 +612,7 @@ async def produces_a_payload_when_there_are_multiple_events(): # A new email arrives! assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright 2", @@ -635,7 +635,7 @@ async def produces_a_payload_when_there_are_multiple_events(): @mark.anyio async def should_not_trigger_when_subscription_is_already_done(): - pubsub = SimplePubSub() + pubsub = create_broadcast_stream(math.inf) subscription = await create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) @@ -643,7 +643,7 @@ async def should_not_trigger_when_subscription_is_already_done(): # A new email arrives! assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright", @@ -669,7 +669,7 @@ async def should_not_trigger_when_subscription_is_already_done(): # A new email arrives! assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright 2", @@ -685,7 +685,7 @@ async def should_not_trigger_when_subscription_is_already_done(): @mark.anyio async def should_not_trigger_when_subscription_is_thrown(): - pubsub = SimplePubSub() + pubsub = create_broadcast_stream(math.inf) subscription = await create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) @@ -693,7 +693,7 @@ async def should_not_trigger_when_subscription_is_thrown(): # A new email arrives! assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Alright", @@ -726,7 +726,7 @@ async def should_not_trigger_when_subscription_is_thrown(): @mark.anyio async def event_order_is_correct_for_multiple_publishes(): - pubsub = SimplePubSub() + pubsub = create_broadcast_stream(math.inf) subscription = await create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) @@ -734,7 +734,7 @@ async def event_order_is_correct_for_multiple_publishes(): # A new email arrives! assert ( - pubsub.emit( + await pubsub.send( { "from": "yuzhi@graphql.org", "subject": "Message", @@ -747,7 +747,7 @@ async def event_order_is_correct_for_multiple_publishes(): # A new email arrives! 
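# A standalone sketch (not part of the patch) of the send() return value the
# surrounding assertions rely on: once every listener is gone, send() reports
# False instead of raising, unless the stream was created with
# requires_listeners=True. Names below are illustrative only.
import math

import anyio
from graphql.pyutils import create_broadcast_stream


async def demo_send_semantics() -> None:
    pubsub = create_broadcast_stream(math.inf)
    listener = pubsub.get_listener()

    assert await pubsub.send("first") is True  # delivered to the listener
    await listener.aclose()
    assert await pubsub.send("second") is False  # nobody is listening anymore

    strict = create_broadcast_stream(math.inf, requires_listeners=True)
    try:
        await strict.send("third")
    except anyio.BrokenResourceError:
        pass  # with requires_listeners=True, sending without listeners raises


if __name__ == "__main__":
    anyio.run(demo_send_semantics)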
         assert (
-            pubsub.emit(
+            await pubsub.send(
                 {
                     "from": "yuzhi@graphql.org",
                     "subject": "Message 2",
diff --git a/tests/pyutils/test_broadcast_stream.py b/tests/pyutils/test_broadcast_stream.py
new file mode 100644
index 00000000..3832c115
--- /dev/null
+++ b/tests/pyutils/test_broadcast_stream.py
@@ -0,0 +1,192 @@
+from anyio import (
+    sleep,
+    fail_after,
+    create_task_group,
+    Event,
+    ClosedResourceError,
+    WouldBlock,
+    BrokenResourceError,
+)
+import math
+
+from pytest import mark, raises
+
+from graphql.pyutils import create_broadcast_stream
+
+
+def describe_broadcast_stream():
+    @mark.anyio
+    async def subscribe_async_iterator_mock():
+        pubsub = create_broadcast_stream(math.inf)
+        iterator = pubsub.get_listener()
+
+        with fail_after(1):
+            # Queue up publishes
+            assert pubsub.send_nowait("Apple") is True
+            assert pubsub.send_nowait("Banana") is True
+
+            # Read payloads
+            assert await iterator.__anext__() == "Apple"
+            assert await iterator.__anext__() == "Banana"
+
+            # Waiting for data
+            is_waiting = Event()
+            i3 = None
+            i4 = None
+
+            async def wait_for_next():
+                nonlocal i3, i4
+                is_waiting.set()
+                i3 = await iterator.__anext__()
+                i4 = await iterator.__anext__()
+
+            async with create_task_group() as tg:
+                tg.start_soon(wait_for_next)
+                await is_waiting.wait()
+                await sleep(0.1)
+                # Publish
+                assert pubsub.send_nowait("Coconut") is True
+                assert pubsub.send_nowait("Durian") is True
+
+            assert i3 == "Coconut"
+            assert i4 == "Durian"
+
+            # Terminate queue
+            await iterator.aclose()
+
+            # Publish is not caught after terminate
+            assert pubsub.send_nowait("Fig") is False
+
+            with raises(ClosedResourceError):
+                await iterator.__anext__()
+
+    @mark.anyio
+    async def iterator_aclose_closes_listeners():
+        pubsub = create_broadcast_stream(math.inf)
+        assert not pubsub._state.listeners
+        iterator = pubsub.get_listener()
+        assert len(pubsub._state.listeners) == 1
+
+        for value in range(3):
+            pubsub.send_nowait(value)
+        await sleep(0)
+        await iterator.aclose()
+        assert pubsub.send_nowait(value) is False  # listeners closed on next send
+        assert not pubsub._state.listeners
+
+    @mark.anyio
+    async def stream_aclose_closes_listeners():
+        pubsub = create_broadcast_stream(math.inf)
+        iterator = pubsub.get_listener()
+        await pubsub.aclose()
+        with fail_after(1):
+            async for el in iterator:
+                pass
+
+    @mark.anyio
+    async def multiple_listeners_get_object():
+        pubsub = create_broadcast_stream(math.inf)
+        iterator1 = pubsub.get_listener()
+        pubsub.send_nowait("A")
+        iterator2 = pubsub.get_listener()
+        pubsub.send_nowait("B")
+        pubsub.send_nowait("C")
+        await pubsub.aclose()
+        await pubsub.aclose()  # idempotent
+        with fail_after(1):
+            assert ["A", "B", "C"] == [el async for el in iterator1]
+            assert ["B", "C"] == [el async for el in iterator2]
+
+    @mark.anyio
+    async def cloned_broadcast():
+        pubsub1 = create_broadcast_stream(math.inf)
+        pubsub2 = pubsub1.clone()
+        iterator1 = pubsub1.get_listener()
+        pubsub2.send_nowait("A")
+        iterator2 = pubsub2.get_listener()
+        pubsub1.send_nowait("B")
+        pubsub2.send_nowait("C")
+
+        assert pubsub2._state.ref_counter == 2
+        await pubsub1.aclose()
+        assert pubsub2._state.ref_counter == 1
+        # check expected errors after close
+        with raises(ClosedResourceError):
+            pubsub1.send_nowait("D")
+        with raises(ClosedResourceError):
+            await pubsub1.send("D")
+        with raises(ClosedResourceError):
+            pubsub1.clone()
+
+        # idempotent close
+        assert pubsub2._state.ref_counter == 1
+        await pubsub1.aclose()
+        assert pubsub2._state.ref_counter == 1
+        pubsub2.send_nowait("E")
+
+        await pubsub2.aclose()
+        with fail_after(1):
+            assert ["A", "B", "C", "E"] == [el async for el in iterator1]
+            assert ["B", "C", "E"] == [el async for el in iterator2]
+
+    def blocking_stream():
+        pubsub = create_broadcast_stream(0)
+        pubsub.send_nowait("A")  # no listener
+
+        pubsub.get_listener()
+        with raises(WouldBlock):
+            pubsub.send_nowait("B")
+
+    @mark.anyio
+    async def close_listeners():
+        pubsub = create_broadcast_stream(math.inf)
+        iterator = pubsub.get_listener()
+        assert pubsub._state.listeners
+        await iterator.aclose()
+        pubsub.send_nowait("A")
+        assert not pubsub._state.listeners
+
+    @mark.anyio
+    async def requires_listeners():
+        pubsub = create_broadcast_stream(math.inf, requires_listeners=True)
+        with raises(BrokenResourceError):
+            pubsub.send_nowait("A")
+
+        iterator = pubsub.get_listener()
+        assert pubsub._state.listeners
+        await iterator.aclose()
+        with raises(BrokenResourceError):
+            pubsub.send_nowait("B")
+        assert not pubsub._state.listeners
+
+        iterator = pubsub.get_listener()
+        assert pubsub._state.listeners
+        await iterator.aclose()
+        with raises(BrokenResourceError):
+            await pubsub.send("C")
+        assert not pubsub._state.listeners
+
+    @mark.anyio
+    async def transformation():
+        pubsub = create_broadcast_stream(math.inf)
+        iterator1 = pubsub.get_listener(transform=lambda el: 2 * el)
+        pubsub.send_nowait(1)
+        iterator2 = pubsub.get_listener(transform=lambda el: 3 * el)
+        pubsub.send_nowait(2)
+        pubsub.send_nowait(3)
+        await pubsub.aclose()
+        with fail_after(1):
+            assert [2, 4, 6] == [el async for el in iterator1]
+            assert [6, 9] == [el async for el in iterator2]
+
+    @mark.anyio
+    async def context_manager():
+        pubsub = create_broadcast_stream(math.inf)
+        async with pubsub as context:
+            assert pubsub is context
+        assert pubsub._closed
+
+        pubsub = create_broadcast_stream(math.inf)
+        with pubsub as context:
+            assert pubsub is context
+        assert pubsub._closed
diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py
index 59861d94..dbbf775b 100644
--- a/tests/pyutils/test_simple_pub_sub.py
+++ b/tests/pyutils/test_simple_pub_sub.py
@@ -7,11 +7,12 @@
 
 
 def describe_simple_pub_sub():
-    @mark.anyio
-    async def subscribe_async_iterator_mock():
+    @mark.parametrize("anyio_backend", ["asyncio"])
+    async def subscribe_async_iterator_mock(anyio_backend):
         pubsub = SimplePubSub()
         iterator = pubsub.get_subscriber()
+        assert iterator.__aiter__() == iterator
 
         # Queue up publishes
         assert pubsub.emit("Apple") is True
         assert pubsub.emit("Banana") is True
@@ -51,8 +52,8 @@ async def subscribe_async_iterator_mock():
         with raises(StopAsyncIteration):
             await iterator.__anext__()
 
-    @mark.anyio
-    async def iterator_aclose_empties_push_queue():
+    @mark.parametrize("anyio_backend", ["asyncio"])
+    async def iterator_aclose_empties_push_queue(anyio_backend):
         pubsub = SimplePubSub()
         assert not pubsub.subscribers
         iterator = pubsub.get_subscriber()
@@ -69,8 +70,8 @@ async def iterator_aclose_empties_push_queue():
         assert iterator.pull_queue.qsize() == 0
         assert not iterator.listening
 
-    @mark.anyio
-    async def iterator_aclose_empties_pull_queue():
+    @mark.parametrize("anyio_backend", ["asyncio"])
+    async def iterator_aclose_empties_pull_queue(anyio_backend):
         pubsub = SimplePubSub()
         assert not pubsub.subscribers
         iterator = pubsub.get_subscriber()
@@ -86,11 +87,26 @@ async def iterator_aclose_empties_pull_queue():
         assert iterator.pull_queue.qsize() == 0
         assert not iterator.listening
 
-    @mark.anyio
-    async def iterator_aclose_is_idempotent():
+    @mark.parametrize("anyio_backend", ["asyncio"])
+    async def iterator_aclose_is_idempotent(anyio_backend):
         pubsub = SimplePubSub()
         iterator = pubsub.get_subscriber()
         assert iterator.listening
         for n in range(3):
             await iterator.aclose()
             assert not iterator.listening
+
+    @mark.parametrize("anyio_backend", ["asyncio"])
+    async def non_async_subscriber(anyio_backend):
+        message = "test message"
+        received_message = False
+
+        def get_msg(evt):
+            nonlocal received_message
+            assert evt == message
+            received_message = True
+
+        pubsub = SimplePubSub()
+        pubsub.subscribers.add(get_msg)
+        pubsub.emit(message)
+        assert received_message
diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py
index fdbd7f9a..98b394dd 100644
--- a/tests/test_user_registry.py
+++ b/tests/test_user_registry.py
@@ -4,18 +4,20 @@
 operations on a simulated user registry database backend.
 """
 
-from anyio import create_task_group, fail_after, sleep
+from anyio import create_task_group, fail_after, sleep, Event, TASK_STATUS_IGNORED
+from anyio.abc import TaskStatus
+from anyio.streams.memory import MemoryObjectReceiveStream
 from collections import defaultdict
 from enum import Enum
-from inspect import isawaitable
-from typing import Any, Dict, List, NamedTuple, Optional
+from typing import Any, Dict, List, NamedTuple, Optional, Callable, Tuple, Coroutine
+from functools import partial
 
 from pytest import fixture, mark
 
 from graphql import (
     graphql,
     parse,
-    subscribe,
+    subscribe,
     GraphQLArgument,
     GraphQLBoolean,
     GraphQLEnumType,
@@ -30,7 +32,7 @@
     GraphQLString,
 )
 
-from graphql.pyutils import SimplePubSub, SimplePubSubIterator
+from graphql.pyutils import create_broadcast_stream
 
 from graphql.execution.map_async_iterator import MapAsyncIterator
 
@@ -57,7 +59,7 @@ class UserRegistry:
 
     def __init__(self, **users):
        self._registry: Dict[str, User] = users
-        self._pubsub = defaultdict(SimplePubSub)
+        self._pubsub = defaultdict(partial(create_broadcast_stream, 0))
 
     async def get(self, id_: str) -> Optional[User]:
         """Get a user object from the registry"""
@@ -70,7 +72,7 @@ async def create(self, **kwargs) -> User:
         id_ = str(len(self._registry))
         user = User(id=id_, **kwargs)
         self._registry[id_] = user
-        self.emit_event(MutationEnum.CREATED, user)
+        await self.emit_event(MutationEnum.CREATED, user)
         return user
 
     async def update(self, id_: str, **kwargs) -> User:
@@ -79,24 +81,24 @@ async def update(self, id_: str, **kwargs) -> User:
         # noinspection PyProtectedMember
         user = self._registry[id_]._replace(**kwargs)
         self._registry[id_] = user
-        self.emit_event(MutationEnum.UPDATED, user)
+        await self.emit_event(MutationEnum.UPDATED, user)
         return user
 
     async def delete(self, id_: str) -> User:
         """Delete a user object in the registry"""
         await sleep(0)
         user = self._registry.pop(id_)
-        self.emit_event(MutationEnum.DELETED, user)
+        await self.emit_event(MutationEnum.DELETED, user)
         return user
 
-    def emit_event(self, mutation: MutationEnum, user: User) -> None:
+    async def emit_event(self, mutation: MutationEnum, user: User) -> None:
         """Emit mutation events for the given object and its class"""
         payload = {"user": user, "mutation": mutation.value}
-        self._pubsub[None].emit(payload)  # notify all user subscriptions
-        self._pubsub[user.id].emit(payload)  # notify single user subscriptions
+        await self._pubsub[None].send(payload)  # notify all user subscriptions
+        await self._pubsub[user.id].send(payload)  # notify single user subscriptions
 
-    def event_iterator(self, id_: Optional[str]) -> SimplePubSubIterator:
-        return self._pubsub[id_].get_subscriber()
+    def event_iterator(self, id_: Optional[str]) -> MemoryObjectReceiveStream:
+        return self._pubsub[id_].get_listener()
 
 
 mutation_type = GraphQLEnumType("MutationType", MutationEnum)
@@ -158,8 +160,9 @@ async def resolve_delete_user(_root, info, id):
 async def subscribe_user(_root, info, id=None):
     """Subscribe to mutations of a specific user object or all user objects"""
     async_iterator = info.context["registry"].event_iterator(id)
-    async for event in async_iterator:
-        yield await event if isawaitable(event) else event  # pragma: no cover exit
+    async with async_iterator:  # pragma: no cover exit
+        async for event in async_iterator:
+            yield event  # pragma: no cover exit
 
 
 # noinspection PyShadowingBuiltins,PyUnusedLocal
@@ -253,149 +256,174 @@ async def query_user(context):
         }
 
 
+def expect_events(
+    num_events: int,
+) -> Tuple[
+    Dict[str, Any],
+    Event,
+    Callable[[MemoryObjectReceiveStream, str], Coroutine[None, None, None]],
+]:
+    received = {}
+    all_events_received = Event()
+
+    async def subscriber(
+        publisher, event_name, task_status: TaskStatus = TASK_STATUS_IGNORED
+    ):
+        nonlocal num_events
+        async with publisher.get_listener() as listener:
+            task_status.started()
+            async for msg in listener:  # pragma: no cover exit
+                received[event_name] = msg
+                num_events -= 1
+                print("received", event_name, msg)
+                print("remaining_num", num_events)
+                if num_events == 0:
+                    all_events_received.set()
+
+    return received, all_events_received, subscriber
+
+
 def describe_mutation():
     @mark.anyio
     async def create_user(context):
-        received = {}
-
-        def subscriber(event_name):
-            def receive(msg):
-                received[event_name] = msg
-
-            return receive
+        received, received_events, subscriber = expect_events(2)
 
         # noinspection PyProtectedMember
         pubsub = context["registry"]._pubsub
-        pubsub[None].subscribers.add(subscriber("User"))
-        pubsub["0"].subscribers.add(subscriber("User 0"))
-
-        query = """
-            mutation ($userData: UserInputType!) {
-              createUser(data: $userData) {
-                id, firstName, lastName, tweets, verified
+        with fail_after(1):
+            async with create_task_group() as tg:
+                await tg.start(subscriber, pubsub[None], "User")
+                await tg.start(subscriber, pubsub["0"], "User 0")
+
+                query = """
+                    mutation ($userData: UserInputType!) {
+                      createUser(data: $userData) {
+                        id, firstName, lastName, tweets, verified
+                      }
+                    }
+                    """
+                user_data = dict(
+                    firstName="John", lastName="Doe", tweets=42, verified=True
+                )
+                variables = {"userData": user_data}
+                result = await graphql(
+                    schema, query, context_value=context, variable_values=variables
+                )
+
+                user = await context["registry"].get("0")
+                assert user == User(id="0", **user_data)  # type: ignore
+
+                assert result.errors is None
+                assert result.data == {
+                    "createUser": {
+                        "id": user.id,
+                        "firstName": user.firstName,
+                        "lastName": user.lastName,
+                        "tweets": user.tweets,
+                        "verified": user.verified,
+                    }
+                }
-              }
-            }
-            """
-        user_data = dict(firstName="John", lastName="Doe", tweets=42, verified=True)
-        variables = {"userData": user_data}
-        result = await graphql(
-            schema, query, context_value=context, variable_values=variables
-        )
-
-        user = await context["registry"].get("0")
-        assert user == User(id="0", **user_data)  # type: ignore
-
-        assert result.errors is None
-        assert result.data == {
-            "createUser": {
-                "id": user.id,
-                "firstName": user.firstName,
-                "lastName": user.lastName,
-                "tweets": user.tweets,
-                "verified": user.verified,
-            }
-        }
-
-        assert received == {
-            "User": {"user": user, "mutation": MutationEnum.CREATED.value},
-            "User 0": {"user": user, "mutation": MutationEnum.CREATED.value},
-        }
+
+                print("WAITING FOR EVENTS")
+                await received_events.wait()
+                print("DONE")
+                assert received == {
+                    "User": {"user": user, "mutation": MutationEnum.CREATED.value},
+                    "User 0": {"user": user, "mutation": MutationEnum.CREATED.value},
+                }
+                tg.cancel_scope.cancel()
 
     @mark.anyio
     async def update_user(context):
-        received = {}
-
-        def subscriber(event_name):
-            def receive(msg):
-                received[event_name] = msg
-
-            return receive
+        received, received_events, subscriber = expect_events(2)
 
         # noinspection PyProtectedMember
         pubsub = context["registry"]._pubsub
-        pubsub[None].subscribers.add(subscriber("User"))
-        pubsub["0"].subscribers.add(subscriber("User 0"))
-
-        user = await context["registry"].create(
-            firstName="John", lastName="Doe", tweets=42, verified=True
-        )
-        user_data = {
-            "firstName": "Jane",
-            "lastName": "Roe",
-            "tweets": 210,
-            "verified": False,
-        }
+        with fail_after(1):
+            async with create_task_group() as tg:
+                await tg.start(subscriber, pubsub[None], "User")
+                await tg.start(subscriber, pubsub["0"], "User 0")
-
-        query = """
-            mutation ($userId: ID!, $userData: UserInputType!) {
-              updateUser(id: $userId, data: $userData) {
-                id, firstName, lastName, tweets, verified
+
+                user = await context["registry"].create(
+                    firstName="John", lastName="Doe", tweets=42, verified=True
+                )
+                user_data = {
+                    "firstName": "Jane",
+                    "lastName": "Roe",
+                    "tweets": 210,
+                    "verified": False,
+                }
-              }
-            }"""
-
-        variables = {"userId": user.id, "userData": user_data}
-        result = await graphql(
-            schema, query, context_value=context, variable_values=variables
-        )
-
-        user = await context["registry"].get("0")
-        assert user == User(id="0", **user_data)  # type: ignore
-
-        assert result.errors is None
-        assert result.data == {
-            "updateUser": {
-                "id": user.id,
-                "firstName": user.firstName,
-                "lastName": user.lastName,
-                "tweets": user.tweets,
-                "verified": user.verified,
-            }
-        }
+
+                query = """
+                    mutation ($userId: ID!, $userData: UserInputType!) {
+                      updateUser(id: $userId, data: $userData) {
+                        id, firstName, lastName, tweets, verified
+                      }
+                    }"""
+
+                variables = {"userId": user.id, "userData": user_data}
+                result = await graphql(
+                    schema, query, context_value=context, variable_values=variables
+                )
+
+                user = await context["registry"].get("0")
+                assert user == User(id="0", **user_data)  # type: ignore
+
+                assert result.errors is None
+                assert result.data == {
+                    "updateUser": {
+                        "id": user.id,
+                        "firstName": user.firstName,
+                        "lastName": user.lastName,
+                        "tweets": user.tweets,
+                        "verified": user.verified,
+                    }
+                }
-
-        assert received == {
-            "User": {"user": user, "mutation": MutationEnum.UPDATED.value},
-            "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value},
-        }
+
+                await received_events.wait()
+                assert received == {
+                    "User": {"user": user, "mutation": MutationEnum.UPDATED.value},
+                    "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value},
+                }
+                tg.cancel_scope.cancel()
 
     @mark.anyio
     async def delete_user(context):
-        received = {}
-
-        def subscriber(name):
-            def receive(msg):
-                received[name] = msg
-
-            return receive
+        received, received_events, subscriber = expect_events(2)
 
         # noinspection PyProtectedMember
         pubsub = context["registry"]._pubsub
-        pubsub[None].subscribers.add(subscriber("User"))
-        pubsub["0"].subscribers.add(subscriber("User 0"))
+        with fail_after(1):
+            async with create_task_group() as tg:
+                await tg.start(subscriber, pubsub[None], "User")
+                await tg.start(subscriber, pubsub["0"], "User 0")
 
-        user = await context["registry"].create(
-            firstName="John", lastName="Doe", tweets=42, verified=True
-        )
+                user = await context["registry"].create(
+                    firstName="John", lastName="Doe", tweets=42, verified=True
+                )
 
-        query = """
-            mutation ($userId: ID!) {
-              deleteUser(id: $userId)
-            }
-            """
+                query = """
+                    mutation ($userId: ID!) {
+                      deleteUser(id: $userId)
+                    }
+                    """
 
-        variables = {"userId": user.id}
-        result = await graphql(
-            schema, query, context_value=context, variable_values=variables
-        )
+                variables = {"userId": user.id}
+                result = await graphql(
+                    schema, query, context_value=context, variable_values=variables
+                )
 
-        assert result.errors is None
-        assert result.data == {"deleteUser": True}
+                assert result.errors is None
+                assert result.data == {"deleteUser": True}
 
-        assert await context["registry"].get(user.id) is None
+                assert await context["registry"].get(user.id) is None
 
-        assert received == {
-            "User": {"user": user, "mutation": MutationEnum.DELETED.value},
-            "User 0": {"user": user, "mutation": MutationEnum.DELETED.value},
-        }
+                await received_events.wait()
+                assert received == {
+                    "User": {"user": user, "mutation": MutationEnum.DELETED.value},
+                    "User 0": {"user": user, "mutation": MutationEnum.DELETED.value},
+                }
+                tg.cancel_scope.cancel()
 
 
 def describe_subscription():
diff --git a/tox.ini b/tox.ini
index 5751ed78..479043eb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,7 +12,7 @@ python =
 
 [testenv:black]
 basepython = python3.9
-deps = black==22.1.0
+deps = black==22.3.0
 commands =
     black src tests setup.py -t py39 --check
 
@@ -26,6 +27,7 @@ commands =
 basepython = python3.9
 deps =
     mypy==0.931
+    trio-typing>=0.7.0,<0.8.0
     pytest>=6.2,<7
 commands =
     mypy src tests
@@ -47,11 +48,12 @@ commands =
 [testenv]
 deps =
     pytest>=6.2,<7
-    pytest-asyncio>=0.16,<1
    pytest-benchmark>=3.4,<4
    pytest-cov>=3,<4
    pytest-describe>=2,<3
    pytest-timeout>=2,<3
+    anyio>=3.5.0
    py36,py37: typing-extensions>=4,<5
+    py37,py38,py39,py310: trio>=0.16,<0.21
 commands =
    pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100}
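Note (not part of the patch): the sketch below illustrates how the new
graphql.pyutils.create_broadcast_stream helper added above is intended to be
used, based only on the behaviour that tests/pyutils/test_broadcast_stream.py
asserts. The function name "demo", the fruit payloads, and the use of
anyio.run are illustrative; the broadcast-stream API itself (send_nowait,
send, get_listener with an optional transform, aclose) is the one exercised
by the tests.

    # Minimal usage sketch of the broadcast stream, mirroring the test suite.
    import math

    import anyio

    from graphql.pyutils import create_broadcast_stream


    async def demo() -> None:
        # Every item sent is fanned out to all listeners that already exist;
        # math.inf requests an unbounded buffer, as in the tests.
        pubsub = create_broadcast_stream(math.inf)
        first = pubsub.get_listener()

        pubsub.send_nowait("Apple")  # only `first` was listening here
        doubled = pubsub.get_listener(transform=lambda item: item * 2)
        await pubsub.send("Banana")  # both listeners receive this one

        await pubsub.aclose()  # closing the sender ends iteration for listeners

        assert [item async for item in first] == ["Apple", "Banana"]
        assert [item async for item in doubled] == ["BananaBanana"]


    anyio.run(demo)

The tests also pin the remaining SimplePubSub tests to the asyncio backend,
while the broadcast-stream tests run under both asyncio and trio via the
anyio_backend fixture, which is what the sketch relies on through anyio.run.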