From a543e2b4a45502577720e645b8f35863a20d794f Mon Sep 17 00:00:00 2001
From: phlax
Date: Tue, 25 Jan 2022 05:38:45 +0000
Subject: [PATCH] tooling: Use updated pytooling deps (#19660)

The pytooling packages have been updated to remove all non-async
runners/checkers and to refactor some of the code. This has reduced code
complexity/paths/lines/tests etc.

This PR updates the Envoy side to use the newer async-only pytooling
packages, and updates the Envoy tooling to use async or newer code paths as
required/appropriate.

In order to make this work I had to re-add pytest-asyncio to the Envoy deps,
as the tests are again testing async functions. I also had to add
nest-asyncio so that the test runner can start/stop test loops inside the
checker's loop. (Short illustrative sketches of both patterns are appended
after the diff.)

Signed-off-by: Ryan Northey
---
 .bazelignore                                  |   1 +
 pytest.ini                                    |   1 +
 tools/base/BUILD                              |  12 --
 tools/base/bazel_query.py                     |  63 -------
 tools/base/envoy_python.bzl                   |   2 +-
 tools/base/requirements.in                    |  28 +--
 tools/base/requirements.txt                   | 165 +++++++++---------
 tools/code_format/python_check.py             |   2 +-
 tools/dependency/BUILD                        |   3 +-
 tools/dependency/validate.py                  |  60 ++++---
 tools/dependency/validate_test.py             |  40 +++--
 tools/docs/BUILD                              |   2 +-
 tools/docs/rst_check.py                       |   6 +-
 tools/docs/tests/test_rst_check.py            |   4 +-
 tools/extensions/BUILD                        |   2 +-
 tools/extensions/extensions_check.py          |  12 +-
 .../extensions/tests/test_extensions_check.py |  12 +-
 tools/testing/BUILD                           |  14 +-
 tools/testing/all_pytests.py                  |  40 +++--
 tools/testing/plugin.py                       |  32 +---
 tools/testing/python_coverage.py              |   8 +-
 tools/testing/python_pytest.py                |   8 +-
 tools/testing/tests/test_all_pytests.py       |  46 ++---
 tools/testing/tests/test_python_coverage.py   |   4 +-
 tools/testing/tests/test_python_pytest.py     |  83 +--------
 25 files changed, 270 insertions(+), 380 deletions(-)
 delete mode 100644 tools/base/bazel_query.py

diff --git a/.bazelignore b/.bazelignore
index 8bbbe337c66a..abf80e90a9f3 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -1,3 +1,4 @@
 api
 examples/grpc-bridge/script
 tools/clang_tools
+tools/dev/src
diff --git a/pytest.ini b/pytest.ini
index be97f8e60e6a..816796951009 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -2,3 +2,4 @@
 addopts = -raq --ignore=tools/testing/external/*,__init__.py,testing/conf --color=yes --cov-append -p tools.testing.plugin --cov-config=.coveragerc -Werror -vv tools
 testpaths = tests
+asyncio_mode = auto
diff --git a/tools/base/BUILD b/tools/base/BUILD
index eba567e77fc6..7f30400f1b00 100644
--- a/tools/base/BUILD
+++ b/tools/base/BUILD
@@ -1,5 +1,3 @@
-load("@rules_python//python:defs.bzl", "py_binary")
-load("@base_pip3//:requirements.bzl", "requirement")
 load("//bazel:envoy_build_system.bzl", "envoy_package")

 licenses(["notice"])  # Apache 2

 envoy_package()

 exports_files([
     "entry_point.py",
 ])
-
-py_binary(
-    name = "bazel_query",
-    srcs = ["bazel_query.py"],
-    main = "bazel_query.py",
-    deps = [
-        "@envoy_repo",
-        requirement("envoy.base.utils"),
-    ],
-)
diff --git a/tools/base/bazel_query.py b/tools/base/bazel_query.py
deleted file mode 100644
index 48825838de45..000000000000
--- a/tools/base/bazel_query.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-"""Envoy Bazel query implementation.
-
-This module can be used either as a `py_binary` or a `py_library`.
-
-cli usage (outputs to json):
-
-```console
-$ bazel run //tools/base:bazel_query "deps(source/...)" | jq "."
-``` - -python usage: - -```python -from tools.base.bazel_query import query - -result = query("deps(source/...)") -``` - -NB: This allows running queries that do not define scope and cannot be -run as genqueries. **It should not therefore be used in build rules**. -""" - -# The upstream lib is maintained here: -# -# https://github.com/envoyproxy/pytooling/tree/main/envoy.base.utils -# -# Please submit issues/PRs to the pytooling repo: -# -# https://github.com/envoyproxy/pytooling -# - -import json -import pathlib -import sys -from functools import cached_property - -import abstracts - -from envoy.base.utils import ABazelQuery - -import envoy_repo - - -@abstracts.implementer(ABazelQuery) -class EnvoyBazelQuery: - - @cached_property - def path(self) -> pathlib.Path: - return pathlib.Path(envoy_repo.PATH) - - -query = EnvoyBazelQuery().query - - -def main(*args): - print(json.dumps(query(*args[0:1]))) - - -if __name__ == "__main__": - sys.exit(main(*sys.argv[1:])) - -__all__ = ("query",) diff --git a/tools/base/envoy_python.bzl b/tools/base/envoy_python.bzl index 3745bc774a5d..284816da6bd9 100644 --- a/tools/base/envoy_python.bzl +++ b/tools/base/envoy_python.bzl @@ -1,4 +1,4 @@ -load("@rules_python//python:defs.bzl", "py_binary", "py_library") +load("@rules_python//python:defs.bzl", "py_binary") load("@base_pip3//:requirements.bzl", base_entry_point = "entry_point") def envoy_py_test(name, package, visibility, envoy_prefix = "@envoy"): diff --git a/tools/base/requirements.in b/tools/base/requirements.in index ba38f02e0b03..a4e11a9caaae 100644 --- a/tools/base/requirements.in +++ b/tools/base/requirements.in @@ -1,30 +1,32 @@ abstracts>=0.0.12 -aio.functional>=0.0.10 -aio.subprocess>=0.0.4 -aio.tasks>=0.0.5 +aio.api.bazel +aio.core>=0.2.0 +aio.run.runner>=0.2.1 +aio.run.checker>=0.2.1 colorama coloredlogs coverage -envoy.base.checker>=0.1.1 -envoy.base.runner>=0.1.0 -envoy.base.utils>=0.0.13 -envoy.code_format.python_check>=0.0.4 -envoy.dependency.cve_scan -envoy.dependency.pip_check>=0.0.6 -envoy.distribution.release>=0.0.4 -envoy.distribution.verify>=0.0.6 -envoy.docs.sphinx-runner>=0.0.6 +envoy.base.utils>=0.0.14 +envoy.code_format.python_check>=0.0.7 +envoy.dependency.cve_scan>=0.0.4 +envoy.dependency.pip_check>=0.1.0 +envoy.distribution.release>=0.0.7 +envoy.distribution.repo>=0.0.5 +envoy.distribution.verify>=0.0.8 +envoy.docs.sphinx-runner>=0.0.8 envoy.gpg.identity>=0.0.6 -envoy.gpg.sign>=0.0.7 +envoy.gpg.sign>=0.0.9 flake8 frozendict gitpython jinja2 +nest-asyncio pep8-naming ply pygithub pyreadline pytest +pytest-asyncio pytest-cov pytest-patches pyyaml diff --git a/tools/base/requirements.txt b/tools/base/requirements.txt index 212d0768c34c..3d28bdb9db15 100644 --- a/tools/base/requirements.txt +++ b/tools/base/requirements.txt @@ -8,41 +8,55 @@ abstracts==0.0.12 \ --hash=sha256:acc01ff56c8a05fb88150dff62e295f9071fc33388c42f1dfc2787a8d1c755ff # via # -r requirements.in - # envoy.abstract.command - # envoy.base.checker - # envoy.base.runner + # aio.api.bazel + # aio.core + # aio.run.checker + # aio.run.runner # envoy.base.utils # envoy.code-format.python-check # envoy.dependency.cve-scan # envoy.dependency.pip-check + # envoy.distribution.release + # envoy.distribution.repo # envoy.github.abstract # envoy.github.release -aio.functional==0.0.10 \ - --hash=sha256:20d3bda56196cd6916eca97965d818f61ca37f98ec9f4d23545802a180362813 \ - --hash=sha256:9d456f0c2382968aacd62cccc2b815a25fd55f53c2c6223d6e587efeb8bc2622 +aio.api.bazel==0.0.1 \ + 
--hash=sha256:21094c7f8ed038d4668d93efa908d0770cf4bb781373a1300f152b211ff3dc81 \ + --hash=sha256:d110ab219de520c911bd1505f516cf208fea75fe66529f638c9b4ac182b20ab8 + # via -r requirements.in +aio.core==0.2.0 \ + --hash=sha256:40a6d6495eaf11a9333847e5d74ed84452da5dfbc785c65f022d7c1343126f4c \ + --hash=sha256:a174f73793b57050c53463dde4a06f2655f613c72f4789f568dd4f32bc54af2c # via # -r requirements.in - # aio.tasks + # envoy.code-format.python-check # envoy.dependency.cve-scan + # envoy.distribution.release + # envoy.distribution.repo # envoy.github.abstract # envoy.github.release -aio.stream==0.0.2 \ - --hash=sha256:6f5baaff48f6319db134cd56c06ccf89db1f7c5f67a26382e081efc96f2f675d - # via envoy.github.release -aio.subprocess==0.0.4 \ - --hash=sha256:fd504a7c02423c40fde19ad87b62932b9eaa091f5a22d26b89b452059a728750 +aio.run.checker==0.2.1 \ + --hash=sha256:58c63f9bcaeb4e8bd8682fe3fb235012863010af0b6d83fcbc3c76d84ca162db \ + --hash=sha256:cd5c379e05ef401d70ccfdca53718b2462a6e752b224116268bc9e2bf7663da8 # via # -r requirements.in # envoy.code-format.python-check -aio.tasks==0.0.5 \ - --hash=sha256:a2225785b6f3ecbe0ca879a2a1c0314440c7a3606dd92b2731db547a60b5ed5e \ - --hash=sha256:dae64f6d72b07b1f6d68dfa991670572b1939e9585f10ac64f5367c6fd5b4df2 + # envoy.dependency.cve-scan + # envoy.dependency.pip-check + # envoy.distribution.distrotest + # envoy.distribution.verify +aio.run.runner==0.2.1 \ + --hash=sha256:80062b417b127b433224fd889673b06167cd9fbaaf24c4148f799e3f9993632d \ + --hash=sha256:87feef7303efba78908dde07482999daa2a0254b37a7c972b4fdfab685eb416b # via # -r requirements.in - # envoy.code-format.python-check - # envoy.dependency.cve-scan + # aio.run.checker + # envoy.distribution.release + # envoy.distribution.repo + # envoy.docs.sphinx-runner # envoy.github.abstract # envoy.github.release + # envoy.gpg.sign aiodocker==0.21.0 \ --hash=sha256:1f2e6db6377195962bb676d4822f6e3a0c525e1b5d60b8ebbab68230bff3d227 \ --hash=sha256:6fe00135bb7dc40a407669d3157ecdfd856f3737d939df54f40a479d40cf7bdc @@ -53,7 +67,7 @@ aiodocker==0.21.0 \ aiofiles==0.7.0 \ --hash=sha256:a1c4fc9b2ff81568c83e21392a82f344ea9d23da906e4f6a52662764545e19d4 \ --hash=sha256:c67a6823b5f23fcab0a2595a289cec7d8c863ffcb4322fb8cd6b90400aedfdbc - # via aio.stream + # via aio.core aiohttp==3.7.4.post0 \ --hash=sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe \ --hash=sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe \ @@ -93,7 +107,7 @@ aiohttp==3.7.4.post0 \ --hash=sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a \ --hash=sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95 # via - # aio.stream + # aio.core # aiodocker # envoy.dependency.cve-scan # envoy.github.abstract @@ -189,7 +203,7 @@ coloredlogs==15.0.1 \ --hash=sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0 # via # -r requirements.in - # envoy.base.runner + # aio.run.runner coverage[toml]==6.0 \ --hash=sha256:08fd55d2e00dac4c18a2fa26281076035ec86e764acdc198b9185ce749ada58f \ --hash=sha256:11ce082eb0f7c2bbfe96f6c8bcc3a339daac57de4dc0f3186069ec5c58da911c \ @@ -260,80 +274,65 @@ docutils==0.16 \ # sphinx # sphinx-rtd-theme # sphinx-tabs -envoy.abstract.command==0.0.3 \ - --hash=sha256:4b7b15c91bea1f2eb7c2e8e35f95cd9437e1c8f151adc093bf7858fc85d48221 - # via - # envoy.base.runner - # envoy.distribution.release -envoy.base.checker==0.1.1 \ - --hash=sha256:58fb4b78e0df5d0d400d47cc6326c739a9f5e759bd43b90f35612dcf8a3cd838 \ - 
--hash=sha256:a7db6d2a9a3178c95102eb177f41208de677b803b9fcfb6fca603a5a1997be01 +envoy.base.utils==0.0.14 \ + --hash=sha256:416e088320dff3f12dc270b7725812339b94816264351f591c09869b054971cc \ + --hash=sha256:882216c2dbc8d23ab77cac9b7483e93cfca8aea6aa6fc5c23490e2dccf959c63 # via # -r requirements.in # envoy.code-format.python-check # envoy.dependency.cve-scan # envoy.dependency.pip-check # envoy.distribution.distrotest - # envoy.distribution.verify -envoy.base.runner==0.1.0 \ - --hash=sha256:38f2660d258ca54400953a3e94c430006169436cb9bae3c4fdd22d73bb6d2245 \ - --hash=sha256:6f7e3465719a8bd98b6a104271f2d612da361f521e1e9789d6e99904a6d641de - # via - # -r requirements.in - # envoy.base.checker # envoy.distribution.release - # envoy.docs.sphinx-runner - # envoy.github.abstract - # envoy.gpg.sign -envoy.base.utils==0.0.13 \ - --hash=sha256:45673d598f98d15c89466cfda987637031c6ff70b4843d901144199d01aef083 \ - --hash=sha256:c0d5cde2aa18ad01ce7c2c580c5ce81a29a2a33e1a7b39dd35997c9011adcb01 - # via - # -r requirements.in - # envoy.code-format.python-check - # envoy.dependency.cve-scan - # envoy.dependency.pip-check - # envoy.distribution.distrotest + # envoy.distribution.repo # envoy.distribution.verify # envoy.docs.sphinx-runner # envoy.github.release # envoy.gpg.sign -envoy.code-format.python-check==0.0.4 \ - --hash=sha256:5e166102d1f873f0c14640bcef87b46147cbad1cb68888c977acfde7fce96e04 +envoy.code-format.python-check==0.0.7 \ + --hash=sha256:c34f12946c908d2c7deb9faefecb044d8d5a9458755b40ed2537b04184fc8a21 \ + --hash=sha256:c4758e9da6d5cba437f8948becadb4c3ab5f9f01a07f1a0965944873ae724963 # via -r requirements.in -envoy.dependency.cve-scan==0.0.1 \ - --hash=sha256:438973e6258deb271d60a9ad688c13ebf9c5360ccb9b6b0d4af3b3228235b153 \ - --hash=sha256:733fa5c6bdbe91da4afe1d46bca75279f717e410693866825d92208fa0d3418f +envoy.dependency.cve-scan==0.0.4 \ + --hash=sha256:036bc115f09b3e14151708a33f9fe4c4ee32e911c0096ff44c140492fd3bcc9a \ + --hash=sha256:087abcbc5a366d0ef27359829096451b1578feb11f87920cc63dc853ebf2ac71 # via -r requirements.in -envoy.dependency.pip-check==0.0.7 \ - --hash=sha256:970a402f2574ee4db1fb6fe63136ef1b5c1f65784e1e805d13875d4d5595178d \ - --hash=sha256:cae6b3f9eaf2d93b146bc8bdf7fb8f2f9fccd3552b34506b0764d412a58a8d6c +envoy.dependency.pip-check==0.1.0 \ + --hash=sha256:8b05a513969300a0e5dd54a0440e2a564296cb29b7ef7f4c25dc9c7e4ff30e28 \ + --hash=sha256:f9ccf73a513f90fbf1d161f125710afda52e74968e89bfecc6380cdd70c02553 # via -r requirements.in -envoy.distribution.distrotest==0.0.5 \ - --hash=sha256:042747ff9691fe33bfccbf7821bae5735b705b9488a9a97f702d6b5b37063245 \ - --hash=sha256:ea9d7cd819c0dbc34bc412c8c6608cb2898b002517ff32c3baa2a1b5275e93fb +envoy.distribution.distrotest==0.0.7 \ + --hash=sha256:3de711ba72cc78158cc70aa6957dc7f029f4c0877196d42a4d5a11757fda03e0 \ + --hash=sha256:50cdba5f94bc82a632774cfd9e64a288d96dfbc1c2b5db33189dfb2dd913fc96 # via envoy.distribution.verify -envoy.distribution.release==0.0.4 \ - --hash=sha256:41037e0488f0593ce5173739fe0cd1b45a4775f5a47738b85d9d04024ca241a2 +envoy.distribution.release==0.0.7 \ + --hash=sha256:adacef68b7c78ae750c3c57d92afc4ed6372f6adf568721386eb0a6e74255e75 \ + --hash=sha256:cd17ea17efffd1647047ee259c8c3918f3e48808610b1e33123ccc21776f8833 # via -r requirements.in -envoy.distribution.verify==0.0.6 \ - --hash=sha256:7b5022ca18189089f24dd31f0486a470e1a6636a8cc352a0507ca6305421f932 \ - --hash=sha256:dbb7e535a5c18e0845b6d66017e3ca816254f40b2bc025951d92c5059ba9d518 +envoy.distribution.repo==0.0.5 \ + 
--hash=sha256:6ee86168a83758ef6ed0884c9ca17fdcec5517a24c13b13c4e3ed10609f3b49e \ + --hash=sha256:cc2e9504d0769b82404b94bb14a35cb536a2d2a3742d5aeeeeba5cd17684a75b + # via -r requirements.in +envoy.distribution.verify==0.0.8 \ + --hash=sha256:218fbd757abc3ef1745a87525a78723f307fa6ca52bd4f7d5fee4a9c3a90d906 \ + --hash=sha256:b2f719a640fc0a477f591a7704828bd4e2da1da9133562595d276f070d3a14cc # via -r requirements.in envoy.docker.utils==0.0.2 \ --hash=sha256:a12cb57f0b6e204d646cbf94f927b3a8f5a27ed15f60d0576176584ec16a4b76 # via envoy.distribution.distrotest -envoy.docs.sphinx-runner==0.0.6 \ - --hash=sha256:0b48a732c5030ff80b896d8c8c0aa014ffcab6f25419ed9539f2bd735ea8c396 \ - --hash=sha256:634a25a45928cec2900274c67b541911a7d33509d709a740d805bc08bd1f5ffa +envoy.docs.sphinx-runner==0.0.8 \ + --hash=sha256:b9d051aa7ee995b647d00ea5a86fe3be30f399e344837185edd53ff3b6fc6988 \ + --hash=sha256:fb2dc69067d9593adebda90794e912d84bb9a1c0e9c5d23ac6da608b79a8fc98 # via -r requirements.in -envoy.github.abstract==0.0.16 \ - --hash=sha256:badf04104492fb6b37ba2163f2b225132ed04aba680beb218e7c7d918564f8ee +envoy.github.abstract==0.0.21 \ + --hash=sha256:243dae9457243fb42e4643b1f45006c3b0d3151c808884217447d29a081b26a1 \ + --hash=sha256:af4086c79120a523f6978bdf0a91c582b672cdc300241c6eb16ad91ed72b7a0c # via # envoy.distribution.release # envoy.github.release -envoy.github.release==0.0.8 \ - --hash=sha256:fbc4354030137eb565b8c4d679965e4ef60b01de0c09310441836e592ca0cd19 +envoy.github.release==0.0.11 \ + --hash=sha256:1bed7a829bd77391c33107a58057b89de34a3045895f276816e07ee0119d844f \ + --hash=sha256:a3c117892b822b9f1d29bbc92f9c82c733d972316fbe2611c4895cf1e4b1113d # via envoy.distribution.release envoy.gpg.identity==0.0.6 \ --hash=sha256:2195e09aaacbaa8d6588378ad96d206708ffaf863aead38445eb3e54e4223c34 \ @@ -341,9 +340,9 @@ envoy.gpg.identity==0.0.6 \ # via # -r requirements.in # envoy.gpg.sign -envoy.gpg.sign==0.0.7 \ - --hash=sha256:898907dc76627802ab248576d2fb7d12ed4cce75148b5b070382bb51d25e6504 \ - --hash=sha256:d61165d631d92c9726129ae72ddbb409d2514de3540a829d9ed9a090c165459e +envoy.gpg.sign==0.0.9 \ + --hash=sha256:b5c226dbbd466fcfac18deed5940e94469c247b5a6667be06680730753355091 \ + --hash=sha256:d2869a5d1acda6253b07e3ab9283a95452ed93f5bba49b3e2f38d37e6f281ea7 # via -r requirements.in flake8==4.0.1 \ --hash=sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d \ @@ -377,11 +376,12 @@ frozendict==2.2.0 \ --hash=sha256:f6293ec3181ff98e5ae98f748e8ffe6c937caf14ca9561d3a6d6d02a82c7fbff # via # -r requirements.in - # envoy.base.runner + # aio.run.runner gidgethub==5.0.1 \ --hash=sha256:3efbd6998600254ec7a2869318bd3ffde38edc3a0d37be0c14bc46b45947b682 \ --hash=sha256:67245e93eb0918b37df038148af675df43b62e832c529d7f859f6b90d9f3e70d # via + # envoy.distribution.release # envoy.github.abstract # envoy.github.release gitdb==4.0.7 \ @@ -522,20 +522,23 @@ multidict==5.1.0 \ # via # aiohttp # yarl +nest-asyncio==1.5.4 \ + --hash=sha256:3fdd0d6061a2bb16f21fe8a9c6a7945be83521d81a0d15cff52e9edee50101d6 \ + --hash=sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd + # via -r requirements.in packaging==21.0 \ --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 # via # envoy.dependency.cve-scan + # envoy.github.abstract # envoy.github.release # pytest # sphinx pep8-naming==0.12.1 \ --hash=sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37 \ 
--hash=sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841 - # via - # -r requirements.in - # envoy.code-format.python-check + # via -r requirements.in pluggy==1.0.0 \ --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 @@ -609,8 +612,13 @@ pytest==6.2.5 \ --hash=sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134 # via # -r requirements.in + # pytest-asyncio # pytest-cov # pytest-patches +pytest-asyncio==0.17.2 \ + --hash=sha256:6d895b02432c028e6957d25fc936494e78c6305736e785d9fee408b1efbc7ff4 \ + --hash=sha256:e0fe5dbea40516b661ef1bcfe0bd9461c2847c4ef4bb40012324f2454fb7d56d + # via -r requirements.in pytest-cov==3.0.0 \ --hash=sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6 \ --hash=sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470 @@ -806,8 +814,9 @@ verboselogs==1.7 \ --hash=sha256:e33ddedcdfdafcb3a174701150430b11b46ceb64c2a9a26198c76a156568e427 # via # -r requirements.in - # envoy.base.runner + # aio.run.runner # envoy.distribution.distrotest + # envoy.distribution.repo # envoy.github.abstract # envoy.github.release # envoy.gpg.sign diff --git a/tools/code_format/python_check.py b/tools/code_format/python_check.py index 135b7e9fd3ff..4b2b6fd59f9b 100755 --- a/tools/code_format/python_check.py +++ b/tools/code_format/python_check.py @@ -48,7 +48,7 @@ def path(self) -> pathlib.Path: def main(*args) -> int: - return EnvoyPythonChecker(*args).run() + return EnvoyPythonChecker(*args)() if __name__ == "__main__": diff --git a/tools/dependency/BUILD b/tools/dependency/BUILD index e595e8eb3fb4..9d2c7264b832 100644 --- a/tools/dependency/BUILD +++ b/tools/dependency/BUILD @@ -45,7 +45,8 @@ py_binary( "//source/extensions:extensions_build_config", ], deps = [ - "//tools/base:bazel_query", + "@envoy_repo", + requirement("aio.api.bazel"), ], ) diff --git a/tools/dependency/validate.py b/tools/dependency/validate.py index 91705591c03d..b5c9e26aab88 100755 --- a/tools/dependency/validate.py +++ b/tools/dependency/validate.py @@ -5,15 +5,15 @@ the use_category metadata in bazel/repository_locations.bzl. """ +import asyncio import json import pathlib import re import sys -from functools import lru_cache -from envoy.base.utils import BazelQueryError +from aio.api import bazel -from tools.base.bazel_query import query +import envoy_repo BAZEL_QUERY_EXTERNAL_DEP_RE = re.compile('@(\w+)//') EXTENSION_LABEL_RE = re.compile('(//source/extensions/.*):') @@ -48,6 +48,9 @@ def test_only_ignore(dep): return False +query = bazel.BazelEnv(envoy_repo.PATH).query + + class DependencyError(Exception): """Error in dependency relationships.""" pass @@ -98,7 +101,7 @@ def __init__(self, extensions_build_config, ignore_deps=IGNORE_DEPS, repository_ for untracked_dep in implied_untracked_deps: self._implied_untracked_deps_revmap[untracked_dep] = dep - def query_external_deps(self, *targets, exclude=None): + async def query_external_deps(self, *targets, exclude=None): """Query the build graph for transitive external dependencies. 
Args: @@ -114,22 +117,21 @@ def query_external_deps(self, *targets, exclude=None): exclude_query = self._filtered_deps_query(exclude) deps_query = f'{deps_query} - {exclude_query}' try: - deps = self._deps_query(deps_query) + deps = await self._deps_query(deps_query) if deps and exclude: # although the deps set is pre-filtered to exclude # the excluded deps, we still need to fetch the exclude set # again and remove any further matches, due to rev dep mangling. # The exclude set could be pre-filtered further (ie only members # of the revmap.values) at the cost of some additional complexity. - exclude_deps = self._deps_query(exclude_query) - except BazelQueryError as e: + exclude_deps = await self._deps_query(exclude_query) + except bazel.BazelQueryError as e: print(f'Bazel query failed with error {e}') raise e return deps - exclude_deps - @lru_cache(maxsize=5) - def _deps_query(self, query_string): - return self._mangle_deps_set(query(query_string)) + async def _deps_query(self, query_string): + return self._mangle_deps_set(await query(query_string)) def _filtered_deps_query(self, targets): return f'filter("^@.*//", deps(set({" ".join(targets)})))' @@ -168,16 +170,16 @@ def __init__(self, dep_info, build_graph): self._dep_info = dep_info self._build_graph = build_graph - def validate_build_graph_structure(self): + async def validate_build_graph_structure(self): """Validate basic assumptions about dependency relationship in the build graph. Raises: DependencyError: on a dependency validation error. """ print('Validating build dependency structure...') - queried_core_ext_deps = self._build_graph.query_external_deps( + queried_core_ext_deps = await self._build_graph.query_external_deps( self._core_rule_label, '//source/extensions/...', exclude=['//source/...']) - queried_all_deps = self._build_graph.query_external_deps( + queried_all_deps = await self._build_graph.query_external_deps( '//source/...', exclude=[self._core_rule_label, '//source/extensions/...']) if queried_all_deps or queried_core_ext_deps: raise DependencyError( @@ -185,14 +187,14 @@ def validate_build_graph_structure(self): 'deps(//source/exe:envoy_main_common_with_core_extensions_lib) ' 'union deps(//source/extensions/...)') - def validate_test_only_deps(self): + async def validate_test_only_deps(self): """Validate that test-only dependencies aren't included in //source/... Raises: DependencyError: on a dependency validation error. """ # Validate that //source doesn't depend on test_only - queried_source_deps = self._build_graph.query_external_deps('//source/...') + queried_source_deps = await self._build_graph.query_external_deps('//source/...') expected_test_only_deps = self._dep_info.deps_by_use_category('test_only') bad_test_only_deps = expected_test_only_deps.intersection(queried_source_deps) if len(bad_test_only_deps) > 0: @@ -200,7 +202,7 @@ def validate_test_only_deps(self): f'//source depends on test-only dependencies: {bad_test_only_deps}') # Validate that //test deps additional to those of //source are captured in # test_only. 
- marginal_test_deps = self._build_graph.query_external_deps( + marginal_test_deps = await self._build_graph.query_external_deps( '//test/...', exclude=['//source/...']) bad_test_deps = marginal_test_deps.difference(expected_test_only_deps) unknown_bad_test_deps = [dep for dep in bad_test_deps if not test_only_ignore(dep)] @@ -209,7 +211,7 @@ def validate_test_only_deps(self): raise DependencyError( f'Missing deps in test_only "use_category": {unknown_bad_test_deps}') - def validate_data_plane_core_deps(self): + async def validate_data_plane_core_deps(self): """Validate dataplane_core dependencies. Check that we at least tag as dataplane_core dependencies that match some @@ -221,7 +223,7 @@ def validate_data_plane_core_deps(self): # Necessary but not sufficient for dataplane. With some refactoring we could # probably have more precise tagging of dataplane/controlplane/other deps in # these paths. - queried_dataplane_core_min_deps = self._build_graph.query_external_deps( + queried_dataplane_core_min_deps = await self._build_graph.query_external_deps( '//source/common/api/...', '//source/common/buffer/...', '//source/common/crypto/...', '//source/common/conn_pool/...', '//source/common/formatter/...', '//source/common/http/...', '//source/common/ssl/...', '//source/common/tcp/...', @@ -238,7 +240,7 @@ def validate_data_plane_core_deps(self): f'"use_category" implied core deps {expected_dataplane_core_deps}: {bad_dataplane_core_deps} ' 'are missing') - def validate_control_plane_deps(self): + async def validate_control_plane_deps(self): """Validate controlplane dependencies. Check that we at least tag as controlplane dependencies that match some @@ -250,7 +252,7 @@ def validate_control_plane_deps(self): # Necessary but not sufficient for controlplane. With some refactoring we could # probably have more precise tagging of dataplane/controlplane/other deps in # these paths. - queried_controlplane_core_min_deps = self._build_graph.query_external_deps( + queried_controlplane_core_min_deps = await self._build_graph.query_external_deps( '//source/common/config/...') # Controlplane will always depend on API. expected_controlplane_core_deps = self._dep_info.deps_by_use_category('controlplane').union( @@ -264,7 +266,7 @@ def validate_control_plane_deps(self): f'by "use_category" implied core deps {expected_controlplane_core_deps}: ' f'{bad_controlplane_core_deps} are missing') - def validate_extension_deps(self, name, target): + async def validate_extension_deps(self, name, target): """Validate that extensions are correctly declared for dataplane_ext and observability_ext. Args: @@ -274,7 +276,7 @@ def validate_extension_deps(self, name, target): Raises: DependencyError: on a dependency validation error. """ - marginal_deps = self._build_graph.query_external_deps( + marginal_deps = await self._build_graph.query_external_deps( target, exclude=['//source/exe:envoy_main_common_with_core_extensions_lib']) expected_deps = [] print(f'Validating ({len(marginal_deps)}) {name} extension dependencies...') @@ -296,20 +298,20 @@ def validate_extension_deps(self, name, target): f'Extension {name} depends on {d} but {d} does not list {name} in its allowlist' ) - def validate_all(self): + async def validate_all(self): """Collection of all validations. Raises: DependencyError: on a dependency validation error. 
""" - self.validate_build_graph_structure() - self.validate_test_only_deps() - self.validate_data_plane_core_deps() - self.validate_control_plane_deps() + await self.validate_build_graph_structure() + await self.validate_test_only_deps() + await self.validate_data_plane_core_deps() + await self.validate_control_plane_deps() # Validate the marginal dependencies introduced for each extension. for name, target in sorted(build_graph.list_extensions()): target_all = EXTENSION_LABEL_RE.match(target).group(1) + '/...' - self.validate_extension_deps(name, target_all) + await self.validate_extension_deps(name, target_all) if __name__ == '__main__': @@ -319,7 +321,7 @@ def validate_all(self): build_graph = BuildGraph(extensions_build_config, repository_locations=repository_locations) validator = Validator(dep_info, build_graph) try: - validator.validate_all() + asyncio.run(validator.validate_all()) except DependencyError as e: print( 'Dependency validation failed, please check metadata in bazel/repository_locations.bzl') diff --git a/tools/dependency/validate_test.py b/tools/dependency/validate_test.py index 01f1bc073228..14f8eb7e793a 100755 --- a/tools/dependency/validate_test.py +++ b/tools/dependency/validate_test.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 """Tests for validate.py""" +import asyncio import unittest import validate @@ -26,10 +27,10 @@ def __init__(self, reachable_deps, extensions): self._reachable_deps = reachable_deps self._extensions = extensions - def query_external_deps(self, *targets, exclude=None): + async def query_external_deps(self, *targets, exclude=None): result = set(sum((self._reachable_deps.get(t, []) for t in targets), [])) if exclude: - result = result - self.query_external_deps(*exclude) + result = result - await self.query_external_deps(*exclude) return result def list_extensions(self): @@ -52,7 +53,7 @@ def test_valid_build_graph_structure(self): '//source/extensions/...': ['b'], '//source/...': ['a', 'b'] }) - validator.validate_build_graph_structure() + asyncio.run(validator.validate_build_graph_structure()) def test_invalid_build_graph_structure(self): validator = self.build_validator({}, { @@ -61,41 +62,46 @@ def test_invalid_build_graph_structure(self): '//source/...': ['a', 'b', 'c'] }) self.assertRaises( - validate.DependencyError, lambda: validator.validate_build_graph_structure()) + validate.DependencyError, + lambda: asyncio.run(validator.validate_build_graph_structure())) def test_valid_test_only_deps(self): validator = self.build_validator({'a': fake_dep('dataplane_core')}, {'//source/...': ['a']}) - validator.validate_test_only_deps() + asyncio.run(validator.validate_test_only_deps()) validator = self.build_validator({'a': fake_dep('test_only')}, {'//test/...': ['a', 'b__pip3']}) - validator.validate_test_only_deps() + asyncio.run(validator.validate_test_only_deps()) def test_invalid_test_only_deps(self): validator = self.build_validator({'a': fake_dep('test_only')}, {'//source/...': ['a']}) - self.assertRaises(validate.DependencyError, lambda: validator.validate_test_only_deps()) + self.assertRaises( + validate.DependencyError, lambda: asyncio.run(validator.validate_test_only_deps())) validator = self.build_validator({'a': fake_dep('test_only')}, {'//test/...': ['b']}) - self.assertRaises(validate.DependencyError, lambda: validator.validate_test_only_deps()) + self.assertRaises( + validate.DependencyError, lambda: asyncio.run(validator.validate_test_only_deps())) def test_valid_dataplane_core_deps(self): validator = self.build_validator({'a': 
fake_dep('dataplane_core')}, {'//source/common/http/...': ['a']}) - validator.validate_data_plane_core_deps() + asyncio.run(validator.validate_data_plane_core_deps()) def test_invalid_dataplane_core_deps(self): validator = self.build_validator({'a': fake_dep('controlplane')}, {'//source/common/http/...': ['a']}) self.assertRaises( - validate.DependencyError, lambda: validator.validate_data_plane_core_deps()) + validate.DependencyError, + lambda: asyncio.run(validator.validate_data_plane_core_deps())) def test_valid_controlplane_deps(self): validator = self.build_validator({'a': fake_dep('controlplane')}, {'//source/common/config/...': ['a']}) - validator.validate_control_plane_deps() + asyncio.run(validator.validate_control_plane_deps()) def test_invalid_controlplane_deps(self): validator = self.build_validator({'a': fake_dep('other')}, {'//source/common/config/...': ['a']}) - self.assertRaises(validate.DependencyError, lambda: validator.validate_control_plane_deps()) + self.assertRaises( + validate.DependencyError, lambda: asyncio.run(validator.validate_control_plane_deps())) def test_valid_extension_deps(self): validator = self.build_validator({ @@ -105,7 +111,7 @@ def test_valid_extension_deps(self): '//source/extensions/foo/...': ['a', 'b'], '//source/exe:envoy_main_common_with_core_extensions_lib': ['a'] }) - validator.validate_extension_deps('foo', '//source/extensions/foo/...') + asyncio.run(validator.validate_extension_deps('foo', '//source/extensions/foo/...')) def test_invalid_extension_deps_wrong_category(self): validator = self.build_validator({ @@ -116,8 +122,8 @@ def test_invalid_extension_deps_wrong_category(self): '//source/exe:envoy_main_common_with_core_extensions_lib': ['a'] }) self.assertRaises( - validate.DependencyError, - lambda: validator.validate_extension_deps('foo', '//source/extensions/foo/...')) + validate.DependencyError, lambda: asyncio.run( + validator.validate_extension_deps('foo', '//source/extensions/foo/...'))) def test_invalid_extension_deps_allowlist(self): validator = self.build_validator({ @@ -128,8 +134,8 @@ def test_invalid_extension_deps_allowlist(self): '//source/exe:envoy_main_common_with_core_extensions_lib': ['a'] }) self.assertRaises( - validate.DependencyError, - lambda: validator.validate_extension_deps('foo', '//source/extensions/foo/...')) + validate.DependencyError, lambda: asyncio.run( + validator.validate_extension_deps('foo', '//source/extensions/foo/...'))) if __name__ == '__main__': diff --git a/tools/docs/BUILD b/tools/docs/BUILD index cbe35c48187d..5ddcd17d2cc7 100644 --- a/tools/docs/BUILD +++ b/tools/docs/BUILD @@ -48,5 +48,5 @@ envoy_entry_point( envoy_py_binary( name = "tools.docs.rst_check", data = ["//docs:root/version_history/current.rst"], - deps = [requirement("envoy.base.checker")], + deps = [requirement("aio.run.checker")], ) diff --git a/tools/docs/rst_check.py b/tools/docs/rst_check.py index e6fcc2e52831..bc5108a9ed4c 100644 --- a/tools/docs/rst_check.py +++ b/tools/docs/rst_check.py @@ -4,7 +4,7 @@ from functools import cached_property from typing import Iterator, List, Pattern -from envoy.base import checker +from aio.run import checker INVALID_REFLINK = r".* ref:.*" REF_WITH_PUNCTUATION_REGEX = r".*\. 
<[^<]*>`\s*" @@ -141,7 +141,7 @@ def set_tokens(self, line: str = "", first_word: str = "", next_word: str = "") class RSTChecker(checker.Checker): checks = ("current_version",) - def check_current_version(self) -> None: + async def check_current_version(self) -> None: errors = list( CurrentVersionFile(pathlib.Path("docs/root/version_history/current.rst")).run_checks()) if errors: @@ -149,7 +149,7 @@ def check_current_version(self) -> None: def main(*args: str) -> int: - return RSTChecker(*args).run() + return RSTChecker(*args)() if __name__ == "__main__": diff --git a/tools/docs/tests/test_rst_check.py b/tools/docs/tests/test_rst_check.py index 91e91ced246c..417b728b18eb 100644 --- a/tools/docs/tests/test_rst_check.py +++ b/tools/docs/tests/test_rst_check.py @@ -341,7 +341,7 @@ def test_rst_checker_constructor(): @pytest.mark.parametrize("errors", [[], ["err1", "err2"]]) -def test_rst_checker_check_current_version(patches, errors): +async def test_rst_checker_check_current_version(patches, errors): checker = rst_check.RSTChecker("path1", "path2", "path3") patched = patches( "pathlib", @@ -351,7 +351,7 @@ def test_rst_checker_check_current_version(patches, errors): with patched as (m_plib, m_version, m_error): m_version.return_value.run_checks.return_value = errors - checker.check_current_version() + assert not await checker.check_current_version() assert ( list(m_plib.Path.call_args) diff --git a/tools/extensions/BUILD b/tools/extensions/BUILD index 3e8b94f3d5c4..cd203a86f500 100644 --- a/tools/extensions/BUILD +++ b/tools/extensions/BUILD @@ -22,7 +22,7 @@ envoy_py_binary( "//test/extensions/filters/network/common/fuzz:uber_per_readfilter.cc", ], deps = [ - requirement("envoy.base.checker"), + requirement("aio.run.checker"), requirement("envoy.base.utils"), ], ) diff --git a/tools/extensions/extensions_check.py b/tools/extensions/extensions_check.py index bfb75d549547..b664e3bfb814 100644 --- a/tools/extensions/extensions_check.py +++ b/tools/extensions/extensions_check.py @@ -9,7 +9,9 @@ from functools import cached_property from typing import Iterator -from envoy.base import checker, utils +from aio.run import checker + +from envoy.base import utils BUILTIN_EXTENSIONS = ( "envoy.request_id.uuid", "envoy.upstreams.tcp.generic", "envoy.transport_sockets.tls", @@ -124,7 +126,7 @@ def add_arguments(self, parser): parser.add_argument("--contrib_build_config") parser.add_argument("--core_extensions") - def check_fuzzed(self) -> None: + async def check_fuzzed(self) -> None: if self.robust_to_downstream_count == self.fuzzed_count: return self.error( @@ -133,13 +135,13 @@ def check_fuzzed(self) -> None: f"downstreams are fuzzed by adding them to filterNames() in {FUZZ_TEST_PATH}" ]) - def check_metadata(self) -> None: + async def check_metadata(self) -> None: for extension in self.metadata: errors = self._check_metadata(extension) if errors: self.error("metadata", errors) - def check_registered(self) -> None: + async def check_registered(self) -> None: only_metadata = set(self.metadata.keys()) - self.all_extensions missing_metadata = self.all_extensions - set(self.metadata.keys()) @@ -186,7 +188,7 @@ def _check_metadata_status(self, extension: str) -> Iterator[str]: def main(*args) -> int: - return ExtensionsChecker(*args).run() + return ExtensionsChecker(*args)() if __name__ == "__main__": diff --git a/tools/extensions/tests/test_extensions_check.py b/tools/extensions/tests/test_extensions_check.py index 7530500498c3..d96886589550 100644 --- a/tools/extensions/tests/test_extensions_check.py 
+++ b/tools/extensions/tests/test_extensions_check.py @@ -162,7 +162,7 @@ def test_extensions_robust_to_downstream_count(): @pytest.mark.parametrize("robust", ["FUZZED_COUNT", "NOT_FUZZED_COUNT"]) -def test_extensions_check_fuzzed(patches, robust): +async def test_extensions_check_fuzzed(patches, robust): checker = extensions_check.ExtensionsChecker() patched = patches( ("ExtensionsChecker.robust_to_downstream_count", dict(new_callable=PropertyMock)), @@ -173,7 +173,7 @@ def test_extensions_check_fuzzed(patches, robust): with patched as (m_robust, m_fuzzed, m_error): m_fuzzed.return_value = "FUZZED_COUNT" m_robust.return_value = robust - checker.check_fuzzed() + assert not await checker.check_fuzzed() ERR_MESSAGE = ( "Check that all network filters robust against untrusted downstreams are fuzzed " @@ -191,7 +191,7 @@ def test_extensions_check_fuzzed(patches, robust): "meta_errors", [dict(foo=True, bar=False, baz=True), dict(foo=False, bar=False, baz=False)]) -def test_extensions_check_metadata(patches, meta_errors): +async def test_extensions_check_metadata(patches, meta_errors): checker = extensions_check.ExtensionsChecker() patched = patches( ("ExtensionsChecker.metadata", dict(new_callable=PropertyMock)), @@ -206,7 +206,7 @@ def _check(k): with patched as (m_meta, m_check, m_error): m_meta.return_value = meta_errors m_check.side_effect = _check - checker.check_metadata() + assert not await checker.check_metadata() assert ( list(list(c) for c in m_check.call_args_list) @@ -229,7 +229,7 @@ def _check(k): [("A", "B", "C", "D"), ("A", "B"), ("B", "C", "D")]) -def test_extensions_registered(patches, all_ext, metadata): +async def test_extensions_check_registered(patches, all_ext, metadata): checker = extensions_check.ExtensionsChecker() patched = patches( ("ExtensionsChecker.metadata", dict(new_callable=PropertyMock)), @@ -240,7 +240,7 @@ def test_extensions_registered(patches, all_ext, metadata): with patched as (m_meta, m_all, m_error): m_meta.return_value = {k: k for k in metadata} m_all.return_value = set(all_ext) - checker.check_registered() + assert not await checker.check_registered() if set(all_ext) == set(metadata): assert not m_error.called diff --git a/tools/testing/BUILD b/tools/testing/BUILD index 651e5db9c50f..c7371b96fd92 100644 --- a/tools/testing/BUILD +++ b/tools/testing/BUILD @@ -21,11 +21,13 @@ envoy_py_binary( "//:pytest.ini", ], deps = [ + requirement("aio.run.runner"), + requirement("envoy.base.utils"), + requirement("nest-asyncio"), requirement("pytest"), + requirement("pytest-asyncio"), requirement("pytest-cov"), requirement("pytest-patches"), - requirement("envoy.base.runner"), - requirement("envoy.base.utils"), ], ) @@ -37,7 +39,7 @@ envoy_py_binary( ], deps = [ requirement("coverage"), - requirement("envoy.base.runner"), + requirement("aio.run.runner"), requirement("envoy.base.utils"), ], ) @@ -45,7 +47,9 @@ envoy_py_binary( envoy_py_binary( name = "tools.testing.all_pytests", deps = [ - requirement("envoy.base.checker"), - requirement("envoy.base.utils"), + "@envoy_repo", + requirement("aio.api.bazel"), + requirement("aio.core"), + requirement("aio.run.checker"), ], ) diff --git a/tools/testing/all_pytests.py b/tools/testing/all_pytests.py index ea4749763b48..abc0038ce987 100644 --- a/tools/testing/all_pytests.py +++ b/tools/testing/all_pytests.py @@ -12,12 +12,20 @@ from functools import cached_property from typing import Optional -from envoy.base import checker, runner +from aio.api import bazel +from aio.core.functional import async_property +from aio.run 
import checker +import envoy_repo -class PytestChecker(checker.BazelChecker): + +class PytestChecker(checker.Checker): checks = ("pytests",) + @cached_property + def bazel(self): + return bazel.BazelEnv(envoy_repo.PATH) + @property def cov_enabled(self) -> bool: return bool(self.args.cov_collect or self.args.cov_html) @@ -37,9 +45,13 @@ def pytest_bazel_args(self): if self.cov_enabled else []) - @cached_property - def pytest_targets(self) -> set: - return set(target for target in self.bazel.query("tools/...") if ":pytest_" in target) + @async_property(cache=True) + async def pytest_targets(self) -> set: + return set( + target + for target + in await self.bazel.query("tools/...") + if ":pytest_" in target) def add_arguments(self, parser): super().add_arguments(parser) @@ -52,28 +64,28 @@ def add_arguments(self, parser): default=None, help="Specify a path to collect html coverage with") - def check_pytests(self) -> None: - for target in self.pytest_targets: + async def check_pytests(self) -> None: + for target in await self.pytest_targets: try: - self.bazel.run(target, *self.pytest_bazel_args) + result = await self.bazel.run(target, *self.pytest_bazel_args) self.succeed("pytests", [target]) - except runner.BazelRunError: + except bazel.BazelRunError: self.error("pytests", [f"{target} failed"]) - def on_checks_begin(self): + async def on_checks_begin(self): if self.cov_path and os.path.exists(self.cov_path): os.unlink(self.cov_path) - def on_checks_complete(self): + async def on_checks_complete(self): if self.cov_html: - self.bazel.run( + await self.bazel.run( "//tools/testing:python_coverage", self.cov_path, self.cov_html) - return super().on_checks_complete() + return await super().on_checks_complete() def main(*args: str) -> Optional[int]: - return PytestChecker(*args).run() + return PytestChecker(*args)() if __name__ == "__main__": diff --git a/tools/testing/plugin.py b/tools/testing/plugin.py index d796d210b776..d1ea48a59fef 100644 --- a/tools/testing/plugin.py +++ b/tools/testing/plugin.py @@ -7,35 +7,19 @@ import pytest +import nest_asyncio -def _async_command_main(patches, main: Callable, handler: str, args: tuple) -> None: - parts = handler.split(".") - patched = patches("asyncio.run", parts.pop(), prefix=".".join(parts)) - - with patched as (m_run, m_handler): - assert main(*args) == m_run.return_value - - assert list(m_run.call_args) == [(m_handler.return_value.run.return_value,), {}] - assert list(m_handler.call_args) == [args, {}] - assert list(m_handler.return_value.run.call_args) == [(), {}] +nest_asyncio.apply() -def _command_main( - patches, - main: Callable, - handler: str, - args=("arg0", "arg1", "arg2"), - async_run: bool = False) -> None: - if async_run: - return _async_command_main(patches, main, handler, args=args) - - patched = patches(handler) +def _command_main(patches, main: Callable, handler: str, args=("arg0", "arg1", "arg2")) -> None: + parts = handler.split(".") + patched = patches(parts.pop(), prefix=".".join(parts)) with patched as (m_handler,): - assert main(*args) == m_handler.return_value.run.return_value - - assert list(m_handler.call_args) == [args, {}] - assert list(m_handler.return_value.run.call_args) == [(), {}] + assert main(*args) == m_handler.return_value.return_value + assert m_handler.call_args == [args, {}] + assert m_handler.return_value.call_args == [(), {}] @pytest.fixture diff --git a/tools/testing/python_coverage.py b/tools/testing/python_coverage.py index 4cef34e130ea..d91f4feff4d2 100755 --- a/tools/testing/python_coverage.py +++ 
b/tools/testing/python_coverage.py @@ -17,7 +17,9 @@ from coverage import cmdline # type:ignore -from envoy.base import runner, utils +from aio.run import runner + +from envoy.base import utils class CoverageRunner(runner.Runner): @@ -41,7 +43,7 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: def coverage_args(self, coveragerc: str) -> list: return ["html"] + self.extra_args + [f"--rcfile={coveragerc}", "-d", self.cov_html] - def run(self) -> int: + async def run(self) -> int: if not self.cov_data: return cmdline.main(self.extra_args) @@ -50,7 +52,7 @@ def run(self) -> int: def main(*args) -> int: - return CoverageRunner(*args).run() + return CoverageRunner(*args)() if __name__ == "__main__": diff --git a/tools/testing/python_pytest.py b/tools/testing/python_pytest.py index 86e7452ce22b..220e6c46872e 100755 --- a/tools/testing/python_pytest.py +++ b/tools/testing/python_pytest.py @@ -16,7 +16,9 @@ import pytest -from envoy.base import runner, utils +from aio.run import runner + +from envoy.base import utils class PytestRunner(runner.Runner): @@ -34,7 +36,7 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: def pytest_args(self, coveragerc: str) -> list: return self.extra_args + [f"--cov-config={coveragerc}"] - def run(self) -> int: + async def run(self) -> int: if not self.cov_collect: return pytest.main(self.extra_args) @@ -43,7 +45,7 @@ def run(self) -> int: def main(*args) -> int: - return PytestRunner(*args).run() + return PytestRunner(*args)() if __name__ == "__main__": diff --git a/tools/testing/tests/test_all_pytests.py b/tools/testing/tests/test_all_pytests.py index bdb36872782c..df6efd4cd2c9 100644 --- a/tools/testing/tests/test_all_pytests.py +++ b/tools/testing/tests/test_all_pytests.py @@ -1,9 +1,9 @@ -from unittest.mock import patch, MagicMock, PropertyMock +from unittest.mock import patch, AsyncMock, MagicMock, PropertyMock import pytest -from envoy.base.runner import BazelRunError +from aio.api.bazel import BazelRunError from tools.testing import all_pytests @@ -87,29 +87,32 @@ def test_all_pytests_pytest_bazel_args(patches, cov_enabled): assert "pytest_bazel_args" not in checker.__dict__ -def test_all_pytests_pytest_targets(): +async def test_all_pytests_pytest_targets(): checker = all_pytests.PytestChecker("path1", "path2", "path3") bazel_mock = patch("tools.testing.all_pytests.PytestChecker.bazel", new_callable=PropertyMock) with bazel_mock as m_bazel: - m_bazel.return_value.query.return_value = [ - "foo", ":pytest_foo", - ":notpytest_foo", ":not_foo", - "bar", "//asdf:pytest_barbaz"] + m_bazel.return_value.query = AsyncMock( + return_value=[ + "foo", ":pytest_foo", + ":notpytest_foo", ":not_foo", + "bar", "//asdf:pytest_barbaz"]) assert ( - checker.pytest_targets + await checker.pytest_targets == set([":pytest_foo", "//asdf:pytest_barbaz"])) assert ( list(m_bazel.return_value.query.call_args) == [('tools/...',), {}]) -def test_all_pytests_add_arguments(): +def test_all_pytests_add_arguments(patches): checker = all_pytests.PytestChecker("path1", "path2", "path3") + patched = patches( + "checker.Checker.add_arguments", + prefix="tools.testing.all_pytests") parser = MagicMock() - super_mock = patch("tools.testing.all_pytests.checker.BazelChecker.add_arguments") - with super_mock as m_super: + with patched as (m_super, ): checker.add_arguments(parser) assert ( @@ -125,9 +128,7 @@ def test_all_pytests_add_arguments(): 'help': 'Specify a path to collect html coverage with'}]]) - - -def test_all_pytests_check_pytests(patches): +async def 
test_all_pytests_check_pytests(patches): checker = all_pytests.PytestChecker("path1", "path2", "path3") patched = patches( "PytestChecker.error", @@ -145,14 +146,14 @@ def test_all_pytests_check_pytests(patches): check6=False, check7=True) - def _run_bazel(target): + async def _run_bazel(target): if not check_runs[target]: raise BazelRunError() with patched as (m_error, m_succeed, m_targets, m_bazel): - m_targets.return_value = check_runs.keys() - m_bazel.return_value.run.side_effect = _run_bazel - checker.check_pytests() + m_targets.side_effect = AsyncMock(return_value=check_runs.keys()) + m_bazel.return_value.run.side_effect = AsyncMock(side_effect=_run_bazel) + assert not await checker.check_pytests() assert ( list(list(c) for c in m_bazel.return_value.run.call_args_list) @@ -178,7 +179,7 @@ def _run_bazel(target): @pytest.mark.parametrize("exists", [True, False]) @pytest.mark.parametrize("cov_path", ["", "SOMEPATH"]) -def test_all_pytests_on_checks_begin(patches, exists, cov_path): +async def test_all_pytests_on_checks_begin(patches, exists, cov_path): checker = all_pytests.PytestChecker("path1", "path2", "path3") patched = patches( "os.path.exists", @@ -189,7 +190,7 @@ def test_all_pytests_on_checks_begin(patches, exists, cov_path): with patched as (m_exists, m_unlink, m_cov_path): m_cov_path.return_value = cov_path m_exists.return_value = exists - checker.on_checks_begin() + assert not await checker.on_checks_begin() if cov_path and exists: assert ( @@ -200,7 +201,7 @@ def test_all_pytests_on_checks_begin(patches, exists, cov_path): @pytest.mark.parametrize("cov_html", ["", "SOMEPATH"]) -def test_all_pytests_on_checks_complete(patches, cov_html): +async def test_all_pytests_on_checks_complete(patches, cov_html): checker = all_pytests.PytestChecker("path1", "path2", "path3") patched = patches( ("PytestChecker.bazel", dict(new_callable=PropertyMock)), @@ -211,7 +212,8 @@ def test_all_pytests_on_checks_complete(patches, cov_html): with patched as (m_bazel, m_complete, m_cov_path, m_cov_html): m_cov_html.return_value = cov_html - assert checker.on_checks_complete() == m_complete.return_value + m_bazel.return_value.run = AsyncMock() + assert await checker.on_checks_complete() == m_complete.return_value assert ( list(m_complete.call_args) == [(), {}]) diff --git a/tools/testing/tests/test_python_coverage.py b/tools/testing/tests/test_python_coverage.py index eb6bbd40571b..6372d24e70c4 100644 --- a/tools/testing/tests/test_python_coverage.py +++ b/tools/testing/tests/test_python_coverage.py @@ -56,7 +56,7 @@ def test_coveragepytest_add_arguments(): @pytest.mark.parametrize("cov_data", ["", "SOMEPATH"]) -def test_coverage_run(patches, cov_data): +async def test_coverage_run(patches, cov_data): runner = python_coverage.CoverageRunner("path1", "path2", "path3") patched = patches( ("CoverageRunner.cov_data", dict(new_callable=PropertyMock)), @@ -68,7 +68,7 @@ def test_coverage_run(patches, cov_data): with patched as (m_cov_data, m_extra_args, m_cov_args, m_cov_rc, m_main): m_cov_data.return_value = cov_data - assert runner.run() == m_main.return_value + assert await runner.run() == m_main.return_value if not cov_data: assert ( diff --git a/tools/testing/tests/test_python_pytest.py b/tools/testing/tests/test_python_pytest.py index 637403674daf..ca4cf39233c4 100644 --- a/tools/testing/tests/test_python_pytest.py +++ b/tools/testing/tests/test_python_pytest.py @@ -53,7 +53,7 @@ def test_pytest_pytest_args(patches): @pytest.mark.parametrize("cov_data", ["", "SOMEPATH"]) -def 
test_pytest_run(patches, cov_data): +async def test_pytest_run(patches, cov_data): runner = python_pytest.PytestRunner("path1", "path2", "path3") patched = patches( ("PytestRunner.cov_collect", dict(new_callable=PropertyMock)), @@ -65,7 +65,7 @@ def test_pytest_run(patches, cov_data): with patched as (m_cov_data, m_extra_args, m_py_args, m_cov_rc, m_main): m_cov_data.return_value = cov_data - assert runner.run() == m_main.return_value + assert await runner.run() == m_main.return_value if not cov_data: assert ( @@ -109,102 +109,37 @@ def test_plugin_command_main(patches): == [(m_command, patches), {}]) -@pytest.mark.parametrize("args", [None, (), tuple(f"ARG{i}" for i in range(0, 3))]) -@pytest.mark.parametrize("async_run", [None, True, False]) @pytest.mark.parametrize("raises", [None, "main", "handler", "run"]) -def test_plugin__command_main(patches, args, async_run, raises): - patched = patches( - "_async_command_main", - prefix="tools.testing.plugin") - _args = ("arg0", "arg1", "arg2") if args is None else args +def test_plugin__command_main(raises): _m_handler = MagicMock() _patches = MagicMock() _patches.return_value.__enter__.return_value = (_m_handler, ) main = MagicMock() handler = MagicMock() - kwargs = {} - if args is not None: - kwargs["args"] = args - if async_run is not None: - kwargs["async_run"] = async_run - if raises != "main": - main.return_value = _m_handler.return_value.run.return_value - if raises != "handler": - _m_handler(*_args) - else: - _m_handler("SOMETHING", "ELSE") - if raises != "run": - _m_handler.return_value.run() - else: - _m_handler.return_value.run("NOT", "RUN") - - with patched as (m_command, ): - if not raises or async_run: - result = plugin._command_main(_patches, main, handler, **kwargs) - else: - with pytest.raises(AssertionError) as e: - plugin._command_main(_patches, main, handler, **kwargs) - - if async_run: - assert result == m_command.return_value - assert ( - list(m_command.call_args) - == [(_patches, - main, - handler), - {'args': _args}]) - assert not _patches.called - assert not main.called - return - - assert not m_command.called - assert ( - list(_patches.call_args) - == [(handler,), {}]) - assert ( - list(main.call_args) - == [_args, {}]) - - if not raises: - assert not result - - -@pytest.mark.parametrize("raises", [None, "main", "aiorun", "handler", "run"]) -def test_plugin__async_command_main(raises): - _m_run = MagicMock() - _m_handler = MagicMock() - _patches = MagicMock() - _patches.return_value.__enter__.return_value = (_m_run, _m_handler) - main = MagicMock() - handler = MagicMock() handler.split.return_value = [f"PART{i}" for i in range(0, 3)] args = ("arg0", "arg1", "arg2") if raises != "main": - main.return_value = _m_run.return_value + main.return_value = _m_handler.return_value.return_value - if raises != "aiorun": - _m_run(_m_handler.return_value.run.return_value) - else: - _m_run("NOT", "AIORUN") if raises != "handler": _m_handler(*args) else: _m_handler("SOMETHING", "ELSE") if raises != "run": - _m_handler.return_value.run() + _m_handler.return_value() else: - _m_handler.return_value.run("NOT", "RUN") + _m_handler.return_value("NOT", "RUN") if not raises: - assert not plugin._async_command_main(_patches, main, handler, args) + assert not plugin._command_main(_patches, main, handler, args) else: with pytest.raises(AssertionError): - plugin._async_command_main(_patches, main, handler, args) + plugin._command_main(_patches, main, handler, args) assert ( list(_patches.call_args) - == [('asyncio.run', 'PART2'), {'prefix': 
'PART0.PART1'}]) + == [('PART2', ), {'prefix': 'PART0.PART1'}]) assert ( list(handler.split.call_args) == [('.',), {}])
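
Note (illustration, not part of the patch): the plugin above calls `nest_asyncio.apply()` at import time because the aio.run checker drives its own event loop, while the pytest-asyncio tests started from inside it need loops of their own. Stock asyncio refuses to nest `asyncio.run()` calls; a minimal sketch of the behaviour the patch relies on:

```python
import asyncio

import nest_asyncio

# Patch asyncio so an already-running loop can be re-entered, as
# tools/testing/plugin.py does at import time.
nest_asyncio.apply()


async def inner() -> str:
    return "ok"


async def outer() -> str:
    # Without nest_asyncio.apply() this nested call raises
    # "RuntimeError: asyncio.run() cannot be called from a running event loop".
    return asyncio.run(inner())


print(asyncio.run(outer()))  # -> ok
```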
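
Also an illustration rather than part of the patch: throughout the diff the entry points change from `Handler(*args).run()` to `Handler(*args)()`, since `run()` is now a coroutine on the aio.run runners/checkers and the instance itself is invoked as the synchronous entry point. A rough, hypothetical stand-in (not the real `aio.run` API) for that shape:

```python
import asyncio
import sys


class Runner:
    """Hypothetical stand-in for an aio.run-style runner (not the real API)."""

    def __init__(self, *args: str) -> None:
        self.args = args

    def __call__(self) -> int:
        # The callable owns the event loop, so subclasses only implement
        # the async `run()` coroutine.
        return asyncio.run(self.run())

    async def run(self) -> int:
        print(f"running with args: {self.args}")
        return 0


def main(*args: str) -> int:
    # Mirrors the updated entry points, e.g. `return PytestRunner(*args)()`.
    return Runner(*args)()


if __name__ == "__main__":
    sys.exit(main(*sys.argv[1:]))
```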