From fff5ed8ad2ee071896a94133909adf220a8e47d9 Mon Sep 17 00:00:00 2001
From: monkut
Date: Wed, 27 Jul 2022 09:52:43 +0900
Subject: [PATCH] Review/Cleanup of master for release prep (#1147)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* :wrench: update for case where expected metadata for given package does not contain 'releases'
* :pencil: add warning for case where expected `package_version` is not found.
* :wrench: change pipenv req to version greater than previously reported problem version.
* 🙈 Add `Pipfile.lock` to `.gitignore` (When supporting multiple versions of python with pipenv, don't include the lock file.)
* :wrench: fix pipenv version restriction.
* :art: run black on `core.py`
* :art: fix flake8, run black
* :wrench: adjust caching (requirements.txt is no longer used).
* :bug: fix Pipfile typo
* :fire: The python3.6 runtime is deprecated by AWS (July 18, 2022 [phase-1], Aug 17, 2022 [phase-2]); remove from tests for next release candidate
* :art: run black :wrench: update black check command.
* :art: run isort
* :pencil: change version 0.54.1 -> 0.55.1 :fire: remove 3.6 from SUPPORTED_VERSIONS
* :fire: remove 3.6 support
* 🔀 merge with `0.54.2-release` branch to apply 3.9 support documentation updates. :fire: remove 3.6 support from README
* 🔀 merge with `0.54.2-release` branch to apply 3.9 support documentation updates.
* ⏪️ revert __version__ back to available pypi version

Without a pypi-available release the `test_slim_handler` test case fails.

```
ERROR: No matching distribution found for zappa==0.55.1
E...................
======================================================================
ERROR: test_slim_handler (tests.tests.TestZappa)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "/home/runner/work/Zappa/Zappa/tests/tests.py", line 2211, in test_slim_handler
    zappa_cli.create_package()
  File "/home/runner/work/Zappa/Zappa/zappa/cli.py", line 2430, in create_package
    venv=self.zappa.create_handler_venv(),
  File "/home/runner/work/Zappa/Zappa/zappa/core.py", line 494, in create_handler_venv
    raise EnvironmentError("Pypi lookup failed")
OSError: Pypi lookup failed
-------------------- >> begin captured stdout << ---------------------
```

* :pencil: update CHANGELOG with committed changes since last release (5b6e241)
* :fire: remove 3.6 from docs
* :wrench: address review comments
* :sparkles: change release to 0.55.1
* :recycle: Update to allow specification of the latest pypi version for `test_slim_handler()`
* :white_check_mark: update `test_slim_handler()` so that the latest pypi version is *not* hardcoded (get from "git tags" result)
* :art: run black/isort
* :wrench: attempt to update GitHub action to retrieve the latest tags.
* :wrench: fix bad ci command definition
* :pencil: created and added issue for python3.6 support removal.
* :sparkles: add `python_requires` to setup().
* :pencil: fix issue #900
* :pencil: add link to https://slackautoinviter.herokuapp.com to join slack. (the zappateam.slack may be private?)
* :fire: remove `future` (past) requirements basestring usage, replacing with isinstance(x, str) :wrench: change usage of utcnow() -> datetime.datetime.now(datetime.timezone.utc) * :fire: Remove 'futures' package requirement (#826) * :pencil: fix issue #900 * :fire: remove unnecessary __future__ import :wrench: Start versioning at 0; 0.55.1 -> 0.55.0 * :art: fix flake8 * :bug: fix cd.yaml to only run on push of tag meeting the release tag criteria. (#1152) * :pencil: add 1152 to CHANGELOG --- .github/ISSUE_TEMPLATE.md | 2 +- .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/workflows/cd.yaml | 7 +- .github/workflows/ci.yaml | 6 +- .gitignore | 1 + CHANGELOG.md | 20 + Makefile | 8 +- Pipfile | 4 +- README.md | 9 +- example/authmodule.py | 27 +- setup.py | 8 +- tests/data/test1.py | 4 +- tests/test_bot_exception_handler_settings.py | 4 +- tests/test_bot_handler_being_triggered.py | 4 +- tests/test_event_script_app.py | 3 - tests/test_handler.py | 4 +- tests/tests.py | 301 +++----- tests/tests_async.py | 8 +- tests/tests_async_old.py | 8 +- tests/tests_docs.py | 12 +- tests/tests_middleware.py | 28 +- tests/tests_placebo.py | 28 +- tests/utils.py | 4 +- zappa/__init__.py | 12 +- zappa/async.py | 4 +- zappa/asynchronous.py | 39 +- zappa/cli.py | 769 +++++-------------- zappa/core.py | 663 +++++----------- zappa/handler.py | 66 +- zappa/letsencrypt.py | 42 +- zappa/middleware.py | 6 +- zappa/utilities.py | 76 +- zappa/wsgi.py | 4 +- 33 files changed, 602 insertions(+), 1581 deletions(-) diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 199a7f494..1d510aa63 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,7 +1,7 @@ ## Context - + ## Expected Behavior diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 19115637a..dada7a22a 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -20,7 +20,7 @@ Before you submit this PR, please make sure that you meet these criteria: * Did you **make sure this code actually works on Lambda**, as well as locally? -* Did you test this code with all of **Python 3.6**, **Python 3.7** and **Python 3.8** ? +* Did you test this code with all of **Python 3.7**, **Python 3.8** and **Python 3.9** ? * Does this commit ONLY relate to the issue at hand and have your linter shit all over the code? diff --git a/.github/workflows/cd.yaml b/.github/workflows/cd.yaml index d18215a5d..e433735c1 100644 --- a/.github/workflows/cd.yaml +++ b/.github/workflows/cd.yaml @@ -1,7 +1,12 @@ name: CD on: # yamllint disable-line rule:truthy - create: + # From documentation: + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onpushbranchestagsbranches-ignoretags-ignore + # on.push.tags: If you define only tags/tags-ignore or only branches/branches-ignore, the workflow won't run for events affecting the undefined Git ref. 
+ # + # This workflow will only run when a tag matching the criteria is pushed + push: tags: ["v?[0-9]+.[0-9]+.[0-9]+"] jobs: diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bebedb183..708f93098 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: [3.6, 3.7, 3.8, 3.9] + python: [3.7, 3.8, 3.9] steps: - name: Checkout Code Repository uses: actions/checkout@v2 @@ -22,8 +22,10 @@ jobs: - uses: actions/cache@v2 with: path: ~/.cache/pip - key: ${{ runner.os }}-pip${{ matrix.python }}-${{ hashFiles('requirements.txt') }} + key: ${{ runner.os }}-pip${{ matrix.python }}-${{ hashFiles('Pipfile') }} restore-keys: ${{ runner.os }}-pip${{ matrix.python }}- + - name: make sure we have version tags + run: git fetch --unshallow --tags - name: Setup Virtualenv run: python -m venv .venv - name: Install diff --git a/.gitignore b/.gitignore index a198169bc..3ba494941 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *~ *.zip +Pipfile.lock # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 20ed898f1..b2a949025 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Zappa Changelog +## 0.55.0 +* Fix "cd workflow fired event outside of the create tags event" (#1152) +* Remove 'futures' package requirement (#826,#808) +* Remove function invoke command does not need quotes around function (#900) +* Add python_requires to setup.py (#1111) +* Remove python 3.6 support (#1151) +* Update handler to be global if INSTANTIATE_LAMBDA_HANDLER_ON_IMPORT=True (#1096) +* Fix async invocation in Python 3 (#1006) +* Drastically reduce cold start times by calling LambdaHandler externally (#982) +* Support Newest Manylinux Version for dependencies (#1083) +* Decode zappa invoke output regardless of --no-color option (#1069) +* Convert project to using 'pipenv' to manage dependencies (#1077) +* Ensure unique scheduled event names (#1080) +* Check isort in CI (#1078) +* Use 'black' everywhere (#1076) +* Update setup.py (#1050) + +## 0.54.2 +* Update documentation to reflect python 3.9 support (#1137) + ## 0.54.1 * Increase Lambda client read timeout to 15m (#1065) * Unpin `Werkzeug` from `v0.x` (#1067) diff --git a/Makefile b/Makefile index 953175d07..4dd514337 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,7 @@ clean: rm -f .coverage requirements: - pip install pipenv==2021.11.09 + pip install pipenv>2021.11.15 pipenv lock pipenv sync --dev @@ -39,10 +39,10 @@ mypy: mypy --show-error-codes --pretty --ignore-missing-imports --strict zappa tests black: - black . + black --line-length 127 . black-check: - black . --check + black --line-length 127 . 
--check @echo "If this fails, simply run: make black" isort: @@ -53,7 +53,7 @@ isort-check: flake: flake8 zappa --count --select=E9,F63,F7,F82 --show-source --statistics - flake8 zappa --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + flake8 zappa --count --exit-zero --max-complexity=55 --max-line-length=127 --statistics --ignore F403,F405,E203,E231,E252,W503 test-docs: nosetests tests/tests_docs.py --with-coverage --cover-package=zappa --with-timer diff --git a/Pipfile b/Pipfile index b846e2959..0b6cf396f 100644 --- a/Pipfile +++ b/Pipfile @@ -16,13 +16,13 @@ mock = "*" mypy = "*" nose = "*" nose-timer = "*" -pipenv = "==2021.11.09" +pipenv = ">2021.11.15" +packaging = "*" [packages] argcomplete = "*" boto3 = ">=1.17.28" durationpy = "*" -future = "*" hjson = "*" jmespath = "*" kappa = "==0.6.0" diff --git a/README.md b/README.md index ae4bcae0f..911860667 100644 --- a/README.md +++ b/README.md @@ -135,7 +135,7 @@ __Awesome!__ ## Installation and Configuration -_Before you begin, make sure you are running Python 3.6/3.7/3.8 and you have a valid AWS account and your [AWS credentials file](https://blogs.aws.amazon.com/security/post/Tx3D6U6WSFGOK2H/A-New-and-Standardized-Way-to-Manage-Credentials-in-the-AWS-SDKs) is properly installed._ +_Before you begin, make sure you are running Python 3.7/3.8/3.9 and you have a valid AWS account and your [AWS credentials file](https://blogs.aws.amazon.com/security/post/Tx3D6U6WSFGOK2H/A-New-and-Standardized-Way-to-Manage-Credentials-in-the-AWS-SDKs) is properly installed._ **Zappa** can easily be installed through pip, like so: @@ -394,11 +394,11 @@ You can execute any function in your application directly at any time by using t For instance, suppose you have a basic application in a file called "my_app.py", and you want to invoke a function in it called "my_function". Once your application is deployed, you can invoke that function at any time by calling: - $ zappa invoke production 'my_app.my_function' + $ zappa invoke production my_app.my_function Any remote print statements made and the value the function returned will then be printed to your local console. **Nifty!** -You can also invoke interpretable Python 3.6/3.7/3.8 strings directly by using `--raw`, like so: +You can also invoke interpretable Python 3.7/3.8/3.9 strings directly by using `--raw`, like so: $ zappa invoke production "print(1 + 2 + 3)" --raw @@ -929,7 +929,7 @@ to change Zappa's behavior. Use these at your own risk! "role_name": "MyLambdaRole", // Name of Zappa execution role. Default --ZappaExecutionRole. To use a different, pre-existing policy, you must also set manage_roles to false. "role_arn": "arn:aws:iam::12345:role/app-ZappaLambdaExecutionRole", // ARN of Zappa execution role. Default to None. To use a different, pre-existing policy, you must also set manage_roles to false. This overrides role_name. Use with temporary credentials via GetFederationToken. "route53_enabled": true, // Have Zappa update your Route53 Hosted Zones when certifying with a custom domain. Default true. - "runtime": "python3.6", // Python runtime to use on Lambda. Can be one of "python3.6", "python3.7" or "python3.8". Defaults to whatever the current Python being used is. + "runtime": "python3.9", // Python runtime to use on Lambda. Can be one of "python3.7", "python3.8", or "python3.9". Defaults to whatever the current Python being used is. "s3_bucket": "dev-bucket", // Zappa zip bucket, "slim_handler": false, // Useful if project >50M. 
Set true to just upload a small handler to Lambda and load actual project from S3 at runtime. Default false. "settings_file": "~/Projects/MyApp/settings/dev_settings.py", // Server side settings file location, @@ -1538,6 +1538,7 @@ If you are adding a non-trivial amount of new code, please include a functioning Please include the GitHub issue or pull request URL that has discussion related to your changes as a comment in the code ([example](https://github.com/zappa/Zappa/blob/fae2925431b820eaedf088a632022e4120a29f89/zappa/zappa.py#L241-L243)). This greatly helps for project maintainability, as it allows us to trace back use cases and explain decision making. Similarly, please make sure that you meet all of the requirements listed in the [pull request template](https://raw.githubusercontent.com/zappa/Zappa/master/.github/PULL_REQUEST_TEMPLATE.md). Please feel free to work on any open ticket, especially any ticket marked with the "help-wanted" label. If you get stuck or want to discuss an issue further, please join [our Slack channel](https://zappateam.slack.com/), where you'll find a community of smart and interesting people working dilligently on hard problems. +[Zappa Slack Auto Invite](https://slackautoinviter.herokuapp.com) Zappa does not intend to conform to PEP8, isolate your commits so that changes to functionality with changes made by your linter. diff --git a/example/authmodule.py b/example/authmodule.py index 6223fea20..a5c8f47a9 100644 --- a/example/authmodule.py +++ b/example/authmodule.py @@ -106,17 +106,10 @@ def _addMethod(self, effect, verb, resource, conditions): the internal list contains a resource ARN and a condition statement. The condition statement can be null.""" if verb != "*" and not hasattr(HttpVerb, verb): - raise NameError( - "Invalid HTTP verb " + verb + ". Allowed verbs in HttpVerb class" - ) + raise NameError("Invalid HTTP verb " + verb + ". Allowed verbs in HttpVerb class") resourcePattern = re.compile(self.pathRegex) if not resourcePattern.match(resource): - raise NameError( - "Invalid resource path: " - + resource - + ". Path should match " - + self.pathRegex - ) + raise NameError("Invalid resource path: " + resource + ". 
Path should match " + self.pathRegex) if resource[:1] == "/": resource = resource[1:] @@ -137,13 +130,9 @@ def _addMethod(self, effect, verb, resource, conditions): ) if effect.lower() == "allow": - self.allowMethods.append( - {"resourceArn": resourceArn, "conditions": conditions} - ) + self.allowMethods.append({"resourceArn": resourceArn, "conditions": conditions}) elif effect.lower() == "deny": - self.denyMethods.append( - {"resourceArn": resourceArn, "conditions": conditions} - ) + self.denyMethods.append({"resourceArn": resourceArn, "conditions": conditions}) def _getEmptyStatement(self, effect): """Returns an empty statement object prepopulated with the correct action and the @@ -222,11 +211,7 @@ def build(self): "policyDocument": {"Version": self.version, "Statement": []}, } - policy["policyDocument"]["Statement"].extend( - self._getStatementForEffect("Allow", self.allowMethods) - ) - policy["policyDocument"]["Statement"].extend( - self._getStatementForEffect("Deny", self.denyMethods) - ) + policy["policyDocument"]["Statement"].extend(self._getStatementForEffect("Allow", self.allowMethods)) + policy["policyDocument"]["Statement"].extend(self._getStatementForEffect("Deny", self.denyMethods)) return policy diff --git a/setup.py b/setup.py index e9678c264..af89dea10 100755 --- a/setup.py +++ b/setup.py @@ -12,12 +12,10 @@ pipfile = ConfigParser() pipfile.read(Path(__file__).parent.resolve() / "Pipfile") required = [ - "{}{}".format(name, version.strip('"')) if version != '"*"' else name - for name, version in pipfile["packages"].items() + "{}{}".format(name, version.strip('"')) if version != '"*"' else name for name, version in pipfile["packages"].items() ] test_required = [ - "{}{}".format(name, version.strip('"')) if version != '"*"' else name - for name, version in pipfile["dev-packages"].items() + "{}{}".format(name, version.strip('"')) if version != '"*"' else name for name, version in pipfile["dev-packages"].items() ] setup( @@ -25,6 +23,7 @@ version=__version__, packages=["zappa"], install_requires=required, + python_requires=">=3.7, <3.10", tests_require=test_required, test_suite="nose.collector", include_package_data=True, @@ -46,7 +45,6 @@ "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", diff --git a/tests/data/test1.py b/tests/data/test1.py index eec2c37ac..d4a378658 100644 --- a/tests/data/test1.py +++ b/tests/data/test1.py @@ -2,6 +2,4 @@ def greet(): - print( - "There is more stupidity than hydrogen in the universe, and it has a longer shelf life." 
- ) + print("There is more stupidity than hydrogen in the universe, and it has a longer shelf life.") diff --git a/tests/test_bot_exception_handler_settings.py b/tests/test_bot_exception_handler_settings.py index 978f4e94c..6c69e0968 100644 --- a/tests/test_bot_exception_handler_settings.py +++ b/tests/test_bot_exception_handler_settings.py @@ -10,7 +10,5 @@ LOG_LEVEL = "DEBUG" PROJECT_NAME = "wsgi_script_name_settings" COGNITO_TRIGGER_MAPPING = {} -AWS_BOT_EVENT_MAPPING = { - "intent-name:DialogCodeHook": "tests.test_handler.raises_exception" -} +AWS_BOT_EVENT_MAPPING = {"intent-name:DialogCodeHook": "tests.test_handler.raises_exception"} EXCEPTION_HANDLER = "tests.test_handler.mocked_exception_handler" diff --git a/tests/test_bot_handler_being_triggered.py b/tests/test_bot_handler_being_triggered.py index ec546da45..90ec6bbdb 100644 --- a/tests/test_bot_handler_being_triggered.py +++ b/tests/test_bot_handler_being_triggered.py @@ -10,6 +10,4 @@ LOG_LEVEL = "DEBUG" PROJECT_NAME = "wsgi_script_name_settings" COGNITO_TRIGGER_MAPPING = {} -AWS_BOT_EVENT_MAPPING = { - "intent-name:DialogCodeHook": "tests.test_handler.handle_bot_intent" -} +AWS_BOT_EVENT_MAPPING = {"intent-name:DialogCodeHook": "tests.test_handler.handle_bot_intent"} diff --git a/tests/test_event_script_app.py b/tests/test_event_script_app.py index 4f64f0873..5ffeee4ce 100644 --- a/tests/test_event_script_app.py +++ b/tests/test_event_script_app.py @@ -1,6 +1,3 @@ -from __future__ import print_function - - def handler_for_events(event, context): print("Event:", event) return True diff --git a/tests/test_handler.py b/tests/test_handler.py index eecb3dcab..cc0590128 100644 --- a/tests/test_handler.py +++ b/tests/test_handler.py @@ -56,9 +56,7 @@ def test_run_function(self): self.assertEqual(LambdaHandler.run_function(one_arg, "e", "c"), "e") self.assertEqual(LambdaHandler.run_function(two_args, "e", "c"), ("e", "c")) self.assertEqual(LambdaHandler.run_function(var_args, "e", "c"), ("e", "c")) - self.assertEqual( - LambdaHandler.run_function(var_args_with_one, "e", "c"), ("e", "c") - ) + self.assertEqual(LambdaHandler.run_function(var_args_with_one, "e", "c"), ("e", "c")) try: LambdaHandler.run_function(unsupported, "e", "c") diff --git a/tests/tests.py b/tests/tests.py index b319fbcd6..831574606 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -5,6 +5,7 @@ import json import os import random +import re import shutil import string import sys @@ -13,6 +14,7 @@ import uuid import zipfile from io import BytesIO +from subprocess import check_output import botocore import botocore.stub @@ -20,6 +22,7 @@ import mock from click.exceptions import ClickException from click.globals import resolve_color_default +from packaging import version from zappa.cli import ZappaCLI, disable_click_colors, shamelessly_promote from zappa.core import ALB_LAMBDA_ALIAS, ASSUME_POLICY, ATTACH_POLICY, Zappa @@ -101,9 +104,7 @@ def test_disable_click_colors(self): def test_copy_editable_packages(self, mock_remove, mock_find_packages): virtual_env = os.environ.get("VIRTUAL_ENV") if not virtual_env: - return self.skipTest( - "test_copy_editable_packages must be run in a virtualenv" - ) + return self.skipTest("test_copy_editable_packages must be run in a virtualenv") temp_package_dir = tempfile.mkdtemp() try: @@ -126,9 +127,9 @@ def test_copy_editable_packages(self, mock_remove, mock_find_packages): z = Zappa() mock_open = mock.mock_open(read_data=egg_path.encode("utf-8")) - with mock.patch("zappa.core.open", mock_open), mock.patch( - "glob.glob" - ) as 
mock_glob, mock.patch("zappa.core.copytree") as mock_copytree: + with mock.patch("zappa.core.open", mock_open), mock.patch("glob.glob") as mock_glob, mock.patch( + "zappa.core.copytree" + ) as mock_copytree: # we use glob.glob to get the egg-links in the temp packages # directory mock_glob.return_value = [temp_egg_link] @@ -161,23 +162,7 @@ def test_create_lambda_package(self): "zappa.core.Zappa.get_installed_packages", return_value=mock_installed_packages, ): - z = Zappa(runtime="python3.6") - path = z.create_lambda_zip(handler_file=os.path.realpath(__file__)) - self.assertTrue(os.path.isfile(path)) - os.remove(path) - - def test_get_manylinux_python36(self): - z = Zappa(runtime="python3.6") - self.assertIsNotNone(z.get_cached_manylinux_wheel("psycopg2", "2.7.1")) - self.assertIsNone(z.get_cached_manylinux_wheel("derpderpderpderp", "0.0")) - - # mock with a known manylinux wheel package so that code for downloading them gets invoked - mock_installed_packages = {"psycopg2": "2.7.1"} - with mock.patch( - "zappa.core.Zappa.get_installed_packages", - return_value=mock_installed_packages, - ): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") path = z.create_lambda_zip(handler_file=os.path.realpath(__file__)) self.assertTrue(os.path.isfile(path)) os.remove(path) @@ -253,34 +238,24 @@ def test_get_manylinux_python39(self): os.remove(path) def test_getting_installed_packages(self, *args): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") # mock pkg_resources call to be same as what our mocked site packages dir has - mock_package = collections.namedtuple( - "mock_package", ["project_name", "version", "location"] - ) - mock_pip_installed_packages = [ - mock_package("super_package", "0.1", "/venv/site-packages") - ] + mock_package = collections.namedtuple("mock_package", ["project_name", "version", "location"]) + mock_pip_installed_packages = [mock_package("super_package", "0.1", "/venv/site-packages")] with mock.patch("os.path.isdir", return_value=True): with mock.patch("os.listdir", return_value=["super_package"]): import pkg_resources # this gets called in non-test Zappa mode - with mock.patch( - "pkg_resources.WorkingSet", return_value=mock_pip_installed_packages - ): - self.assertDictEqual( - z.get_installed_packages("", ""), {"super_package": "0.1"} - ) + with mock.patch("pkg_resources.WorkingSet", return_value=mock_pip_installed_packages): + self.assertDictEqual(z.get_installed_packages("", ""), {"super_package": "0.1"}) def test_getting_installed_packages_mixed_case_location(self, *args): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") # mock pip packages call to be same as what our mocked site packages dir has - mock_package = collections.namedtuple( - "mock_package", ["project_name", "version", "location"] - ) + mock_package = collections.namedtuple("mock_package", ["project_name", "version", "location"]) mock_pip_installed_packages = [ mock_package("SuperPackage", "0.1", "/Venv/site-packages"), mock_package("SuperPackage64", "0.1", "/Venv/site-packages64"), @@ -290,13 +265,9 @@ def test_getting_installed_packages_mixed_case_location(self, *args): with mock.patch("os.listdir", return_value=[]): import pkg_resources # this gets called in non-test Zappa mode - with mock.patch( - "pkg_resources.WorkingSet", return_value=mock_pip_installed_packages - ): + with mock.patch("pkg_resources.WorkingSet", return_value=mock_pip_installed_packages): self.assertDictEqual( - z.get_installed_packages( - "/venv/Site-packages", "/venv/site-packages64" - ), 
+ z.get_installed_packages("/venv/Site-packages", "/venv/site-packages64"), { "superpackage": "0.1", "superpackage64": "0.1", @@ -304,26 +275,18 @@ def test_getting_installed_packages_mixed_case_location(self, *args): ) def test_getting_installed_packages_mixed_case(self, *args): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") # mock pkg_resources call to be same as what our mocked site packages dir has - mock_package = collections.namedtuple( - "mock_package", ["project_name", "version", "location"] - ) - mock_pip_installed_packages = [ - mock_package("SuperPackage", "0.1", "/venv/site-packages") - ] + mock_package = collections.namedtuple("mock_package", ["project_name", "version", "location"]) + mock_pip_installed_packages = [mock_package("SuperPackage", "0.1", "/venv/site-packages")] with mock.patch("os.path.isdir", return_value=True): with mock.patch("os.listdir", return_value=["superpackage"]): import pkg_resources # this gets called in non-test Zappa mode - with mock.patch( - "pkg_resources.WorkingSet", return_value=mock_pip_installed_packages - ): - self.assertDictEqual( - z.get_installed_packages("", ""), {"superpackage": "0.1"} - ) + with mock.patch("pkg_resources.WorkingSet", return_value=mock_pip_installed_packages): + self.assertDictEqual(z.get_installed_packages("", ""), {"superpackage": "0.1"}) def test_load_credentials(self): z = Zappa() @@ -414,55 +377,39 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): ) self.assertEqual( "NONE", - parsable_template["Resources"]["OPTIONS0"]["Properties"][ - "AuthorizationType" - ], + parsable_template["Resources"]["OPTIONS0"]["Properties"]["AuthorizationType"], ) self.assertEqual( "NONE", - parsable_template["Resources"]["OPTIONS1"]["Properties"][ - "AuthorizationType" - ], + parsable_template["Resources"]["OPTIONS1"]["Properties"]["AuthorizationType"], ) self.assertEqual( "MOCK", - parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"][ - "Type" - ], + parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"]["Type"], ) self.assertEqual( "MOCK", - parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"][ - "Type" - ], + parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"]["Type"], ) self.assertEqual( "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'", - parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"][ - "IntegrationResponses" - ][0]["ResponseParameters"][ - "method.response.header.Access-Control-Allow-Headers" - ], + parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"]["IntegrationResponses"][0][ + "ResponseParameters" + ]["method.response.header.Access-Control-Allow-Headers"], ) self.assertEqual( "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'", - parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"][ - "IntegrationResponses" - ][0]["ResponseParameters"][ - "method.response.header.Access-Control-Allow-Headers" - ], + parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"]["IntegrationResponses"][0][ + "ResponseParameters" + ]["method.response.header.Access-Control-Allow-Headers"], ) self.assertTrue( - parsable_template["Resources"]["OPTIONS0"]["Properties"]["MethodResponses"][ - 0 - ]["ResponseParameters"][ + parsable_template["Resources"]["OPTIONS0"]["Properties"]["MethodResponses"][0]["ResponseParameters"][ "method.response.header.Access-Control-Allow-Headers" ] ) self.assertTrue( - 
parsable_template["Resources"]["OPTIONS1"]["Properties"]["MethodResponses"][ - 0 - ]["ResponseParameters"][ + parsable_template["Resources"]["OPTIONS1"]["Properties"]["MethodResponses"][0]["ResponseParameters"][ "method.response.header.Access-Control-Allow-Headers" ] ) @@ -486,12 +433,8 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): "AWS_IAM", parsable_template["Resources"]["GET1"]["Properties"]["AuthorizationType"], ) - self.assertEqual( - True, parsable_template["Resources"]["GET0"]["Properties"]["ApiKeyRequired"] - ) - self.assertEqual( - True, parsable_template["Resources"]["GET1"]["Properties"]["ApiKeyRequired"] - ) + self.assertEqual(True, parsable_template["Resources"]["GET0"]["Properties"]["ApiKeyRequired"]) + self.assertEqual(True, parsable_template["Resources"]["GET1"]["Properties"]["ApiKeyRequired"]) # Authorizer and IAM authorizer = { @@ -514,11 +457,7 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): parsable_template["Resources"]["Authorizer"] # Authorizer with validation expression - invocations_uri = ( - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/" - + lambda_arn - + "/invocations" - ) + invocations_uri = "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/" + lambda_arn + "/invocations" z.create_stack_template(lambda_arn, "helloworld", False, False, authorizer) parsable_template = json.loads(z.cf_template.to_json()) self.assertEqual( @@ -529,18 +468,14 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): "CUSTOM", parsable_template["Resources"]["GET1"]["Properties"]["AuthorizationType"], ) - self.assertEqual( - "TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"] - ) + self.assertEqual("TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"]) self.assertEqual( "ZappaAuthorizer", parsable_template["Resources"]["Authorizer"]["Properties"]["Name"], ) self.assertEqual( 300, - parsable_template["Resources"]["Authorizer"]["Properties"][ - "AuthorizerResultTtlInSeconds" - ], + parsable_template["Resources"]["Authorizer"]["Properties"]["AuthorizerResultTtlInSeconds"], ) self.assertEqual( invocations_uri, @@ -548,15 +483,11 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): ) self.assertEqual( z.credentials_arn, - parsable_template["Resources"]["Authorizer"]["Properties"][ - "AuthorizerCredentials" - ], + parsable_template["Resources"]["Authorizer"]["Properties"]["AuthorizerCredentials"], ) self.assertEqual( "xxx", - parsable_template["Resources"]["Authorizer"]["Properties"][ - "IdentityValidationExpression" - ], + parsable_template["Resources"]["Authorizer"]["Properties"]["IdentityValidationExpression"], ) # Authorizer without validation expression @@ -571,13 +502,9 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): "CUSTOM", parsable_template["Resources"]["GET1"]["Properties"]["AuthorizationType"], ) - self.assertEqual( - "TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"] - ) + self.assertEqual("TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"]) with self.assertRaises(KeyError): - parsable_template["Resources"]["Authorizer"]["Properties"][ - "IdentityValidationExpression" - ] + parsable_template["Resources"]["Authorizer"]["Properties"]["IdentityValidationExpression"] # Authorizer with arn authorizer = { @@ -609,9 +536,7 @@ def test_update_aws_env_vars(self): # Simulate already having some AWS env vars remotely 
mock_client.get_function_configuration.return_value = { "PackageType": "Zip", - "Environment": { - "Variables": {"REMOTE_ONLY": "AAA", "CHANGED_REMOTE": "BBB"} - }, + "Environment": {"Variables": {"REMOTE_ONLY": "AAA", "CHANGED_REMOTE": "BBB"}}, } z.update_lambda_configuration( "test", @@ -633,9 +558,7 @@ def test_update_aws_env_vars(self): # Simulate already having some AWS env vars remotely but none set in aws_environment_variables mock_client.get_function_configuration.return_value = { "PackageType": "Zip", - "Environment": { - "Variables": {"REMOTE_ONLY_1": "AAA", "REMOTE_ONLY_2": "BBB"} - }, + "Environment": {"Variables": {"REMOTE_ONLY_1": "AAA", "REMOTE_ONLY_2": "BBB"}}, } z.update_lambda_configuration("test", "test", "test") end_result_should_be = {"REMOTE_ONLY_1": "AAA", "REMOTE_ONLY_2": "BBB"} @@ -650,9 +573,7 @@ def test_update_layers(self): with mock.patch.object(z, "lambda_client") as mock_client: mock_client.get_function_configuration.return_value = {"PackageType": "Zip"} - z.update_lambda_configuration( - "test", "test", "test", layers=["Layer1", "Layer2"] - ) + z.update_lambda_configuration("test", "test", "test", layers=["Layer1", "Layer2"]) self.assertEqual( mock_client.update_function_configuration.call_args[1]["Layers"], ["Layer1", "Layer2"], @@ -660,9 +581,7 @@ def test_update_layers(self): with mock.patch.object(z, "lambda_client") as mock_client: mock_client.get_function_configuration.return_value = {"PackageType": "Zip"} z.update_lambda_configuration("test", "test", "test") - self.assertEqual( - mock_client.update_function_configuration.call_args[1]["Layers"], [] - ) + self.assertEqual(mock_client.update_function_configuration.call_args[1]["Layers"], []) def test_update_empty_aws_env_hash(self): z = Zappa() @@ -1115,9 +1034,7 @@ def test_load_extended_settings(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "extendo2" zappa_cli.load_settings("test_settings.json") - self.assertEqual( - "lmbda2", zappa_cli.stage_config["s3_bucket"] - ) # Second Extension + self.assertEqual("lmbda2", zappa_cli.stage_config["s3_bucket"]) # Second Extension self.assertTrue(zappa_cli.stage_config["touch"]) # First Extension self.assertTrue(zappa_cli.stage_config["delete_local_zip"]) # The base @@ -1173,34 +1090,26 @@ def test_settings_extension(self): zappa_cli = ZappaCLI() # With all three, we should get the JSON file first. - self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.json" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.json") zappa_cli.load_settings_file() self.assertIn("lonely", zappa_cli.zappa_settings) os.unlink("zappa_settings.json") # Without the JSON file, we should get the TOML file. - self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.toml" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.toml") zappa_cli.load_settings_file() self.assertIn("ttt888", zappa_cli.zappa_settings) self.assertNotIn("devor", zappa_cli.zappa_settings) os.unlink("zappa_settings.toml") # With just the YAML file, we should get it. 
- self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yml" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yml") zappa_cli.load_settings_file() self.assertIn("ttt888", zappa_cli.zappa_settings) self.assertIn("devor", zappa_cli.zappa_settings) os.unlink("zappa_settings.yml") - self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yaml" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yaml") zappa_cli.load_settings_file() self.assertIn("ttt888", zappa_cli.zappa_settings) self.assertIn("devor", zappa_cli.zappa_settings) @@ -1248,9 +1157,7 @@ def test_cli_utility(self): zappa_cli.print_logs(logs, colorize=False, force_colorize=False) zappa_cli.print_logs(logs, colorize=False, force_colorize=True) zappa_cli.print_logs(logs, colorize=True, force_colorize=False) - zappa_cli.print_logs( - logs, colorize=True, non_http=False, http=False, force_colorize=True - ) + zappa_cli.print_logs(logs, colorize=True, non_http=False, http=False, force_colorize=True) zappa_cli.check_for_update() def test_cli_format_invoke_command(self): @@ -1493,9 +1400,7 @@ def test_cli_save_python_settings_file(self): def test_bad_json_catch(self): zappa_cli = ZappaCLI() - self.assertRaises( - ValueError, zappa_cli.load_settings_file, "tests/test_bad_settings.json" - ) + self.assertRaises(ValueError, zappa_cli.load_settings_file, "tests/test_bad_settings.json") def test_bad_stage_name_catch(self): zappa_cli = ZappaCLI() @@ -1506,9 +1411,7 @@ def test_bad_stage_name_catch(self): def test_bad_environment_vars_catch(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "ttt888" - self.assertRaises( - ValueError, zappa_cli.load_settings, "tests/test_bad_environment_vars.json" - ) + self.assertRaises(ValueError, zappa_cli.load_settings, "tests/test_bad_environment_vars.json") # @mock.patch('botocore.session.Session.full_config', new_callable=mock.PropertyMock) # def test_cli_init(self, mock_config): @@ -1765,9 +1668,7 @@ def get_domain_name(domain, *_args, **_kwargs): return zappa_mock.domain_names.get(domain) zappa_mock.get_domain_name.side_effect = get_domain_name - zappa_mock.get_lambda_function_versions.side_effect = ( - get_lambda_function_versions - ) + zappa_mock.get_lambda_function_versions.side_effect = get_lambda_function_versions zappa_cli.zappa = zappa_mock self.assertRaises(ClickException, zappa_cli.certify) @@ -1913,9 +1814,7 @@ def test_get_domain_respects_route53_setting(self, client, template): self.assertIsNotNone(record) zappa_core.apigateway_client.get_domain_name.assert_called_once() zappa_core.route53.list_hosted_zones.assert_called_once() - zappa_core.route53.list_resource_record_sets.assert_called_once_with( - HostedZoneId="somezone" - ) + zappa_core.route53.list_resource_record_sets.assert_called_once_with(HostedZoneId="somezone") @mock.patch("botocore.client") def test_get_all_zones_normal_case(self, client): @@ -2086,9 +1985,7 @@ def test_event_name(self): "this.is.my.dang.function.wassup.yeah.its.long", ) self.assertTrue(len(truncated) <= 64) - self.assertTrue( - truncated.endswith("this.is.my.dang.function.wassup.yeah.its.long") - ) + self.assertTrue(truncated.endswith("this.is.my.dang.function.wassup.yeah.its.long")) truncated = zappa.get_event_name( "basldfkjalsdkfjalsdkfjaslkdfjalsdkfjadlsfkjasdlfkjasdlfkjasdflkjasdf-asdfasdfasdfasdfasdf", "thisidoasdfaljksdfalskdjfalsdkfjasldkfjalsdkfjalsdkfjalsdfkjalasdfasdfasdfasdklfjasldkfjalsdkjfaslkdfjasldkfjasdflkjdasfskdj", @@ -2186,9 +2083,7 @@ def 
test_shameless(self): shamelessly_promote() def test_s3_url_parser(self): - remote_bucket, remote_file = parse_s3_url( - "s3://my-project-config-files/filename.json" - ) + remote_bucket, remote_file = parse_s3_url("s3://my-project-config-files/filename.json") self.assertEqual(remote_bucket, "my-project-config-files") self.assertEqual(remote_file, "filename.json") @@ -2196,9 +2091,7 @@ def test_s3_url_parser(self): self.assertEqual(remote_bucket, "your-bucket") self.assertEqual(remote_file, "account.key") - remote_bucket, remote_file = parse_s3_url( - "s3://my-config-bucket/super-secret-config.json" - ) + remote_bucket, remote_file = parse_s3_url("s3://my-config-bucket/super-secret-config.json") self.assertEqual(remote_bucket, "my-config-bucket") self.assertEqual(remote_file, "super-secret-config.json") @@ -2206,9 +2099,7 @@ def test_s3_url_parser(self): self.assertEqual(remote_bucket, "your-secure-bucket") self.assertEqual(remote_file, "account.key") - remote_bucket, remote_file = parse_s3_url( - "s3://your-bucket/subfolder/account.key" - ) + remote_bucket, remote_file = parse_s3_url("s3://your-bucket/subfolder/account.key") self.assertEqual(remote_bucket, "your-bucket") self.assertEqual(remote_file, "subfolder/account.key") @@ -2232,9 +2123,7 @@ def test_remote_env_package(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "remote_env" zappa_cli.load_settings("test_settings.json") - self.assertEqual( - "s3://lmbda-env/prod/env.json", zappa_cli.stage_config["remote_env"] - ) + self.assertEqual("s3://lmbda-env/prod/env.json", zappa_cli.stage_config["remote_env"]) zappa_cli.create_package() with zipfile.ZipFile(zappa_cli.zip_path, "r") as lambda_zip: content = lambda_zip.read("zappa_settings.py") @@ -2322,7 +2211,21 @@ def test_slim_handler(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "slim_handler" zappa_cli.load_settings("test_settings.json") - zappa_cli.create_package() + + # create_package builds the package from the latest zappa pypi release + # If the *current* minor release is not available on pypi create_package() will fail + # assumes that the latest pypi release has a tag matching "v?[0-9]+.[0-9]+.[0-9]+" defined in git. 
+ command = "git tag" + command_output = check_output(command, shell=True).decode("utf8") + + # get valid versions from tags + version_match_string = "v?[0-9]+.[0-9]+.[0-9]+" + tags = [ + tag.strip() for tag in command_output.split("\n") if tag.strip() and re.match(version_match_string, tag.strip()) + ] + + latest_release_tag = sorted(tags, key=version.parse)[-1] + zappa_cli.create_package(use_zappa_release=latest_release_tag) self.assertTrue(os.path.isfile(zappa_cli.handler_path)) self.assertTrue(os.path.isfile(zappa_cli.zip_path)) @@ -2363,9 +2266,7 @@ def test_settings_py_generation(self): # validate environment variables self.assertIn("ENVIRONMENT_VARIABLES", settings) - self.assertEqual( - settings["ENVIRONMENT_VARIABLES"]["TEST_ENV_VAR"], "test_value" - ) + self.assertEqual(settings["ENVIRONMENT_VARIABLES"]["TEST_ENV_VAR"], "test_value") # validate Context header mappings self.assertIn("CONTEXT_HEADER_MAPPINGS", settings) @@ -2383,9 +2284,7 @@ def test_only_ascii_env_var_allowed(self): zappa_cli.load_settings("tests/test_non_ascii_environment_var_key.json") with self.assertRaises(ValueError) as context: zappa_cli.create_package() - self.assertEqual( - "Environment variable keys must be ascii.", str(context.exception) - ) + self.assertEqual("Environment variable keys must be ascii.", str(context.exception)) def test_titlecase_keys(self): raw = { @@ -2414,11 +2313,7 @@ def test_titlecase_keys(self): def test_is_valid_bucket_name(self): # Bucket names must be at least 3 and no more than 63 characters long. self.assertFalse(is_valid_bucket_name("ab")) - self.assertFalse( - is_valid_bucket_name( - "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefhijlmn" - ) - ) + self.assertFalse(is_valid_bucket_name("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefhijlmn")) # Bucket names must not contain uppercase characters or underscores. 
self.assertFalse(is_valid_bucket_name("aaaBaaa")) self.assertFalse(is_valid_bucket_name("aaa_aaa")) @@ -2483,9 +2378,7 @@ def test_zappa_core_deploy_lambda_alb(self): load_credentials=False, ) zappa_core.elbv2_client = botocore.session.get_session().create_client("elbv2") - zappa_core.lambda_client = botocore.session.get_session().create_client( - "lambda" - ) + zappa_core.lambda_client = botocore.session.get_session().create_client("lambda") elbv2_stubber = botocore.stub.Stubber(zappa_core.elbv2_client) lambda_stubber = botocore.stub.Stubber(zappa_core.lambda_client) @@ -2518,25 +2411,15 @@ def test_zappa_core_deploy_lambda_alb(self): expected_params={ "LoadBalancerArns": [loadbalancer_arn], }, - service_response={ - "LoadBalancers": [ - {"LoadBalancerArn": loadbalancer_arn, "State": {"Code": "active"}} - ] - }, + service_response={"LoadBalancers": [{"LoadBalancerArn": loadbalancer_arn, "State": {"Code": "active"}}]}, ) elbv2_stubber.add_response( "modify_load_balancer_attributes", expected_params={ "LoadBalancerArn": loadbalancer_arn, - "Attributes": [ - {"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]} - ], - }, - service_response={ - "Attributes": [ - {"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]} - ] + "Attributes": [{"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]}], }, + service_response={"Attributes": [{"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]}]}, ) elbv2_stubber.add_response( @@ -2557,14 +2440,10 @@ def test_zappa_core_deploy_lambda_alb(self): "modify_target_group_attributes", expected_params={ "TargetGroupArn": targetgroup_arn, - "Attributes": [ - {"Key": "lambda.multi_value_headers.enabled", "Value": "true"} - ], + "Attributes": [{"Key": "lambda.multi_value_headers.enabled", "Value": "true"}], }, service_response={ - "Attributes": [ - {"Key": "lambda.multi_value_headers.enabled", "Value": "true"} - ], + "Attributes": [{"Key": "lambda.multi_value_headers.enabled", "Value": "true"}], }, ) @@ -2583,9 +2462,7 @@ def test_zappa_core_deploy_lambda_alb(self): "register_targets", expected_params={ "TargetGroupArn": targetgroup_arn, - "Targets": [ - {"Id": "{}:{}".format(kwargs["lambda_arn"], ALB_LAMBDA_ALIAS)} - ], + "Targets": [{"Id": "{}:{}".format(kwargs["lambda_arn"], ALB_LAMBDA_ALIAS)}], }, service_response={}, ) @@ -2625,9 +2502,7 @@ def test_zappa_core_undeploy_lambda_alb(self): load_credentials=False, ) zappa_core.elbv2_client = botocore.session.get_session().create_client("elbv2") - zappa_core.lambda_client = botocore.session.get_session().create_client( - "lambda" - ) + zappa_core.lambda_client = botocore.session.get_session().create_client("lambda") elbv2_stubber = botocore.stub.Stubber(zappa_core.elbv2_client) lambda_stubber = botocore.stub.Stubber(zappa_core.lambda_client) diff --git a/tests/tests_async.py b/tests/tests_async.py index 2fb59c544..7b4f58d19 100644 --- a/tests/tests_async.py +++ b/tests/tests_async.py @@ -73,9 +73,7 @@ def test_async_call_with_defaults(self): async_me = import_and_get_task("tests.test_app.async_me") lambda_async_mock = mock.Mock() lambda_async_mock.return_value.send.return_value = "Running async!" 
- with mock.patch.dict( - "zappa.asynchronous.ASYNC_CLASSES", {"lambda": lambda_async_mock} - ): + with mock.patch.dict("zappa.asynchronous.ASYNC_CLASSES", {"lambda": lambda_async_mock}): # First check that it still runs synchronously by default self.assertEqual(async_me("123"), "run async when on lambda 123") @@ -95,6 +93,4 @@ def test_async_call_with_defaults(self): capture_response=False, lambda_function_name="MyLambda", ) - lambda_async_mock.return_value.send.assert_called_with( - get_func_task_path(async_me), ("qux",), {} - ) + lambda_async_mock.return_value.send.assert_called_with(get_func_task_path(async_me), ("qux",), {}) diff --git a/tests/tests_async_old.py b/tests/tests_async_old.py index b6676f077..37bd1a7dc 100644 --- a/tests/tests_async_old.py +++ b/tests/tests_async_old.py @@ -84,9 +84,7 @@ def test_async_call_with_defaults(self): async_me = import_and_get_task("tests.test_app.async_me") lambda_async_mock = mock.Mock() lambda_async_mock.return_value.send.return_value = "Running async!" - with mock.patch.dict( - "zappa.async.ASYNC_CLASSES", {"lambda": lambda_async_mock} - ): + with mock.patch.dict("zappa.async.ASYNC_CLASSES", {"lambda": lambda_async_mock}): # First check that it still runs synchronously by default self.assertEqual(async_me("123"), "run async when on lambda 123") @@ -106,6 +104,4 @@ def test_async_call_with_defaults(self): capture_response=False, lambda_function_name="MyLambda", ) - lambda_async_mock.return_value.send.assert_called_with( - get_func_task_path(async_me), ("qux",), {} - ) + lambda_async_mock.return_value.send.assert_called_with(get_func_task_path(async_me), ("qux",), {}) diff --git a/tests/tests_docs.py b/tests/tests_docs.py index 3498a1682..7c45156ad 100644 --- a/tests/tests_docs.py +++ b/tests/tests_docs.py @@ -64,9 +64,7 @@ def test_readmetoc(self): # skip empty header continue - header_text_no_spaces = header_text_strip.replace( - " ", "-" - ).lower() + header_text_no_spaces = header_text_strip.replace(" ", "-").lower() toc_line = " " * (len(header) - 2) + "- [%s](#%s)" % ( header_text, header_text_no_spaces.lower(), @@ -88,14 +86,10 @@ def test_readmetoc(self): msg = "README.test.md written so you can manually compare." else: - msg = ( - "You can set environ[ZAPPA_TEST_SAVE_README_NEW]=1 to generate\n" - " README.test.md to manually compare." - ) + msg = "You can set environ[ZAPPA_TEST_SAVE_README_NEW]=1 to generate\n" " README.test.md to manually compare." 
self.assertEquals( "".join(old_readme), new_readme, - "README doesn't match after regenerating TOC\n\n" - "You need to run doctoc after a heading change.\n{}".format(msg), + "README doesn't match after regenerating TOC\n\n" "You need to run doctoc after a heading change.\n{}".format(msg), ) diff --git a/tests/tests_middleware.py b/tests/tests_middleware.py index f9d88529e..2afaf1636 100644 --- a/tests/tests_middleware.py +++ b/tests/tests_middleware.py @@ -61,9 +61,7 @@ def simple_app(environ, start_response): def simple_app(environ, start_response): # String of weird characters status = "301 Moved Permanently" - response_headers = [ - ("Location", f"http://zappa.com/elsewhere{ugly_string}") - ] + response_headers = [("Location", f"http://zappa.com/elsewhere{ugly_string}")] start_response(status, response_headers) return [ugly_string] @@ -88,9 +86,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) self.assertEqual(environ["REMOTE_USER"], "user1") # With empty authorizer, should not include REMOTE_USER @@ -106,9 +102,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) user = environ.get("REMOTE_USER", "no_user") self.assertEqual(user, "no_user") @@ -125,9 +119,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) user = environ.get("REMOTE_USER", "no_user") self.assertEqual(user, "no_user") @@ -144,9 +136,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) user = environ.get("REMOTE_USER", "no_user") self.assertEqual(user, "no_user") @@ -246,9 +236,7 @@ def test_should_allow_empty_query_params(self): }, "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) self.assertEqual(environ["QUERY_STRING"], "") def test_should_handle_multi_value_query_string_params(self): @@ -278,7 +266,5 @@ def test_should_handle_multi_value_query_string_params(self): }, "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) self.assertEqual(environ["QUERY_STRING"], "foo=1&foo=2") diff --git a/tests/tests_placebo.py b/tests/tests_placebo.py index 0be35eef8..2b8f43cf8 100644 --- a/tests/tests_placebo.py +++ b/tests/tests_placebo.py @@ -240,9 +240,7 @@ def test_handler(self, session): "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } lh.handler(event, None) @@ -256,9 +254,7 @@ def 
test_handler(self, session): "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } lh.handler(event, None) @@ -290,9 +286,7 @@ def test_handler(self, session): "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } lh.handler(event, None) @@ -301,16 +295,12 @@ def test_handler(self, session): "account": "72333333333", "region": "us-east-1", "detail": {}, - "Records": [ - {"s3": {"configurationId": "test_project:test_settings.aws_s3_event"}} - ], + "Records": [{"s3": {"configurationId": "test_project:test_settings.aws_s3_event"}}], "source": "aws.events", "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } self.assertEqual("AWS S3 EVENT", lh.handler(event, None)) @@ -522,9 +512,7 @@ def test_add_event_source(self, session): event_source = {"arn": "blah:blah:blah:blah", "events": ["s3:ObjectCreated:*"]} # Sanity. This should fail. try: - es = add_event_source( - event_source, "blah:blah:blah:blah", "test_settings.callback", session - ) + es = add_event_source(event_source, "blah:blah:blah:blah", "test_settings.callback", session) self.fail("Success should have failed.") except ValueError: pass @@ -572,9 +560,7 @@ def test_cli_cognito_triggers(self, session): zappa_cli.api_stage = "ttt888" zappa_cli.api_key_required = True zappa_cli.load_settings("test_settings.json", session) - zappa_cli.lambda_arn = ( - "arn:aws:lambda:us-east-1:12345:function:Zappa-Trigger-Test" - ) + zappa_cli.lambda_arn = "arn:aws:lambda:us-east-1:12345:function:Zappa-Trigger-Test" zappa_cli.update_cognito_triggers() @placebo_session diff --git a/tests/utils.py b/tests/utils.py index 31453e0ac..91e588cc4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -29,9 +29,7 @@ def placebo_session(function): @functools.wraps(function) def wrapper(*args, **kwargs): - session_kwargs = { - "region_name": os.environ.get("AWS_DEFAULT_REGION", "us-east-1") - } + session_kwargs = {"region_name": os.environ.get("AWS_DEFAULT_REGION", "us-east-1")} profile_name = os.environ.get("PLACEBO_PROFILE", None) if profile_name: session_kwargs["profile_name"] = profile_name diff --git a/zappa/__init__.py b/zappa/__init__.py index 371973651..0513a377f 100644 --- a/zappa/__init__.py +++ b/zappa/__init__.py @@ -1,16 +1,12 @@ import sys -SUPPORTED_VERSIONS = [(3, 6), (3, 7), (3, 8), (3, 9)] +SUPPORTED_VERSIONS = [(3, 7), (3, 8), (3, 9)] if sys.version_info[:2] not in SUPPORTED_VERSIONS: - formatted_supported_versions = [ - "{}.{}".format(*version) for version in SUPPORTED_VERSIONS - ] + formatted_supported_versions = ["{}.{}".format(*version) for version in SUPPORTED_VERSIONS] err_msg = "This version of Python ({}.{}) is not supported!\n".format( *sys.version_info - ) + "Zappa (and AWS Lambda) support the following versions of Python: {}".format( - formatted_supported_versions - ) + ) + "Zappa (and AWS Lambda) support the following versions of Python: 
{}".format(formatted_supported_versions) raise RuntimeError(err_msg) -__version__ = "0.54.1" +__version__ = "0.55.0" diff --git a/zappa/async.py b/zappa/async.py index c573b3283..ff48d6a2d 100644 --- a/zappa/async.py +++ b/zappa/async.py @@ -1,8 +1,8 @@ -# -*- coding: utf-8 -*- import warnings +from .asynchronous import * # noqa: F401 + warnings.warn( 'Module "zappa.async" is deprecated; please use "zappa.asynchronous" instead.', category=DeprecationWarning, ) -from .asynchronous import * diff --git a/zappa/asynchronous.py b/zappa/asynchronous.py index db25da0de..7c231a750 100644 --- a/zappa/asynchronous.py +++ b/zappa/asynchronous.py @@ -110,7 +110,7 @@ def my_async_func(*args, **kwargs): SNS_CLIENT = aws_session.client("sns") STS_CLIENT = aws_session.client("sts") DYNAMODB_CLIENT = aws_session.client("dynamodb") -except botocore.exceptions.NoRegionError as e: # pragma: no cover +except botocore.exceptions.NoRegionError: # pragma: no cover # This can happen while testing on Travis, but it's taken care of # during class initialization. pass @@ -136,13 +136,7 @@ class LambdaAsyncResponse: Can be used directly or subclassed if the method to send the message is changed. """ - def __init__( - self, - lambda_function_name=None, - aws_region=None, - capture_response=False, - **kwargs - ): + def __init__(self, lambda_function_name=None, aws_region=None, capture_response=False, **kwargs): """ """ if kwargs.get("boto_session"): self.client = kwargs.get("boto_session").client("lambda") @@ -204,13 +198,7 @@ class SnsAsyncResponse(LambdaAsyncResponse): Serialise the func path and arguments """ - def __init__( - self, - lambda_function_name=None, - aws_region=None, - capture_response=False, - **kwargs - ): + def __init__(self, lambda_function_name=None, aws_region=None, capture_response=False, **kwargs): self.lambda_function_name = lambda_function_name self.aws_region = aws_region @@ -360,17 +348,12 @@ def run( and other arguments are similar to @task """ - lambda_function_name = remote_aws_lambda_function_name or os.environ.get( - "AWS_LAMBDA_FUNCTION_NAME" - ) + lambda_function_name = remote_aws_lambda_function_name or os.environ.get("AWS_LAMBDA_FUNCTION_NAME") aws_region = remote_aws_region or os.environ.get("AWS_REGION") task_path = get_func_task_path(func) return ASYNC_CLASSES[service]( - lambda_function_name=lambda_function_name, - aws_region=aws_region, - capture_response=capture_response, - **task_kwargs + lambda_function_name=lambda_function_name, aws_region=aws_region, capture_response=capture_response, **task_kwargs ).send(task_path, args, kwargs) @@ -436,9 +419,7 @@ def _run_async(*args, **kwargs): When outside of Lambda, the func passed to @task is run and we return the actual value. """ - lambda_function_name = lambda_function_name_arg or os.environ.get( - "AWS_LAMBDA_FUNCTION_NAME" - ) + lambda_function_name = lambda_function_name_arg or os.environ.get("AWS_LAMBDA_FUNCTION_NAME") aws_region = aws_region_arg or os.environ.get("AWS_REGION") if (service in ASYNC_CLASSES) and (lambda_function_name): @@ -489,9 +470,7 @@ def get_func_task_path(func): Format the modular task path for a function via inspection. 
""" module_path = inspect.getmodule(func).__name__ - task_path = "{module_path}.{func_name}".format( - module_path=module_path, func_name=func.__name__ - ) + task_path = "{module_path}.{func_name}".format(module_path=module_path, func_name=func.__name__) return task_path @@ -499,9 +478,7 @@ def get_async_response(response_id): """ Get the response from the async table """ - response = DYNAMODB_CLIENT.get_item( - TableName=ASYNC_RESPONSE_TABLE, Key={"id": {"S": str(response_id)}} - ) + response = DYNAMODB_CLIENT.get_item(TableName=ASYNC_RESPONSE_TABLE, Key={"id": {"S": str(response_id)}}) if "Item" not in response: return None diff --git a/zappa/cli.py b/zappa/cli.py index 546e4b19c..305e74e30 100755 --- a/zappa/cli.py +++ b/zappa/cli.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - """ Zappa CLI @@ -12,7 +9,6 @@ import collections import importlib import inspect -import logging import os import pkgutil import random @@ -24,6 +20,7 @@ import zipfile from builtins import bytes, input from datetime import datetime, timedelta +from typing import Optional import argcomplete import botocore @@ -38,11 +35,9 @@ from click.exceptions import ClickException from click.globals import push_context from dateutil import parser -from past.builtins import basestring -from .core import API_GATEWAY_REGIONS, Zappa, logger +from .core import API_GATEWAY_REGIONS, Zappa from .utilities import ( - InvalidAwsLambdaName, check_new_version_available, detect_django_settings, detect_flask_apps, @@ -70,9 +65,7 @@ "touch", ] -BOTO3_CONFIG_DOCS_URL = ( - "https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration" -) +BOTO3_CONFIG_DOCS_URL = "https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration" ## # Main Input Processing @@ -130,9 +123,7 @@ class ZappaCLI: stage_name_env_pattern = re.compile("^[a-zA-Z0-9_]+$") def __init__(self): - self._stage_config_overrides = ( - {} - ) # change using self.override_stage_config_setting(key, val) + self._stage_config_overrides = {} # change using self.override_stage_config_setting(key, val) @property def stage_config(self): @@ -154,16 +145,12 @@ def get_stage_setting(stage, extended_stages=None): try: stage_settings = dict(self.zappa_settings[stage].copy()) except KeyError: - raise ClickException( - "Cannot extend settings for undefined stage '" + stage + "'." - ) + raise ClickException("Cannot extend settings for undefined stage '" + stage + "'.") extends_stage = self.zappa_settings[stage].get("extends", None) if not extends_stage: return stage_settings - extended_settings = get_stage_setting( - stage=extends_stage, extended_stages=extended_stages - ) + extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages) extended_settings.update(stage_settings) return extended_settings @@ -209,9 +196,7 @@ def handle(self, argv=None): version=pkg_resources.get_distribution("zappa").version, help="Print the zappa version", ) - parser.add_argument( - "--color", default="auto", choices=["auto", "never", "always"] - ) + parser.add_argument("--color", default="auto", choices=["auto", "never", "always"]) env_parser = argparse.ArgumentParser(add_help=False) me_group = env_parser.add_mutually_exclusive_group() @@ -220,15 +205,9 @@ def handle(self, argv=None): me_group.add_argument("stage_env", nargs="?") group = env_parser.add_argument_group() - group.add_argument( - "-a", "--app_function", help="The WSGI application function." 
- ) - group.add_argument( - "-s", "--settings_file", help="The path to a Zappa settings file." - ) - group.add_argument( - "-q", "--quiet", action="store_true", help="Silence all output." - ) + group.add_argument("-a", "--app_function", help="The WSGI application function.") + group.add_argument("-s", "--settings_file", help="The path to a Zappa settings file.") + group.add_argument("-q", "--quiet", action="store_true", help="Silence all output.") # https://github.com/Miserlou/Zappa/issues/407 # Moved when 'template' command added. # Fuck Terraform. @@ -239,36 +218,25 @@ def handle(self, argv=None): help="Make the output of this command be machine readable.", ) # https://github.com/Miserlou/Zappa/issues/891 - group.add_argument( - "--disable_progress", action="store_true", help="Disable progress bars." - ) + group.add_argument("--disable_progress", action="store_true", help="Disable progress bars.") group.add_argument("--no_venv", action="store_true", help="Skip venv check.") ## # Certify ## subparsers = parser.add_subparsers(title="subcommands", dest="command") - cert_parser = subparsers.add_parser( - "certify", parents=[env_parser], help="Create and install SSL certificate" - ) + cert_parser = subparsers.add_parser("certify", parents=[env_parser], help="Create and install SSL certificate") cert_parser.add_argument( "--manual", action="store_true", - help=( - "Gets new Let's Encrypt certificates, but prints them to console." - "Does not update API Gateway domains." - ), - ) - cert_parser.add_argument( - "-y", "--yes", action="store_true", help="Auto confirm yes." + help=("Gets new Let's Encrypt certificates, but prints them to console." "Does not update API Gateway domains."), ) + cert_parser.add_argument("-y", "--yes", action="store_true", help="Auto confirm yes.") ## # Deploy ## - deploy_parser = subparsers.add_parser( - "deploy", parents=[env_parser], help="Deploy application." - ) + deploy_parser = subparsers.add_parser("deploy", parents=[env_parser], help="Deploy application.") deploy_parser.add_argument( "-z", "--zip", @@ -283,7 +251,7 @@ def handle(self, argv=None): ## # Init ## - init_parser = subparsers.add_parser("init", help="Initialize Zappa app.") + subparsers.add_parser("init", help="Initialize Zappa app.") ## # Package @@ -293,9 +261,7 @@ def handle(self, argv=None): parents=[env_parser], help="Build the application zip package locally.", ) - package_parser.add_argument( - "-o", "--output", help="Name of file to output the package to." - ) + package_parser.add_argument("-o", "--output", help="Name of file to output the package to.") ## # Template @@ -311,52 +277,32 @@ def handle(self, argv=None): required=True, help="ARN of the Lambda function to template to.", ) - template_parser.add_argument( - "-r", "--role-arn", required=True, help="ARN of the Role to template with." - ) - template_parser.add_argument( - "-o", "--output", help="Name of file to output the template to." - ) + template_parser.add_argument("-r", "--role-arn", required=True, help="ARN of the Role to template with.") + template_parser.add_argument("-o", "--output", help="Name of file to output the template to.") ## # Invocation ## - invoke_parser = subparsers.add_parser( - "invoke", parents=[env_parser], help="Invoke remote function." - ) + invoke_parser = subparsers.add_parser("invoke", parents=[env_parser], help="Invoke remote function.") invoke_parser.add_argument( "--raw", action="store_true", - help=( - "When invoking remotely, invoke this python as a string," - " not as a modular path." 
- ), - ) - invoke_parser.add_argument( - "--no-color", action="store_true", help=("Don't color the output") + help=("When invoking remotely, invoke this python as a string," " not as a modular path."), ) + invoke_parser.add_argument("--no-color", action="store_true", help=("Don't color the output")) invoke_parser.add_argument("command_rest") ## # Manage ## - manage_parser = subparsers.add_parser( - "manage", help="Invoke remote Django manage.py commands." - ) - rest_help = ( - "Command in the form of . is not " - "required if --all is specified" - ) + manage_parser = subparsers.add_parser("manage", help="Invoke remote Django manage.py commands.") + rest_help = "Command in the form of . is not " "required if --all is specified" manage_parser.add_argument("--all", action="store_true", help=all_help) manage_parser.add_argument("command_rest", nargs="+", help=rest_help) - manage_parser.add_argument( - "--no-color", action="store_true", help=("Don't color the output") - ) + manage_parser.add_argument("--no-color", action="store_true", help=("Don't color the output")) # This is explicitly added here because this is the only subcommand that doesn't inherit from env_parser # https://github.com/Miserlou/Zappa/issues/1002 - manage_parser.add_argument( - "-s", "--settings_file", help="The path to a Zappa settings file." - ) + manage_parser.add_argument("-s", "--settings_file", help="The path to a Zappa settings file.") ## # Rollback @@ -403,12 +349,8 @@ def positive_int(s): ## # Log Tailing ## - tail_parser = subparsers.add_parser( - "tail", parents=[env_parser], help="Tail deployment logs." - ) - tail_parser.add_argument( - "--no-color", action="store_true", help="Don't color log tail output." - ) + tail_parser = subparsers.add_parser("tail", parents=[env_parser], help="Tail deployment logs.") + tail_parser.add_argument("--no-color", action="store_true", help="Don't color log tail output.") tail_parser.add_argument( "--http", action="store_true", @@ -425,9 +367,7 @@ def positive_int(s): default="100000s", help="Only show lines since a certain timeframe.", ) - tail_parser.add_argument( - "--filter", type=str, default="", help="Apply a filter pattern to the logs." - ) + tail_parser.add_argument("--filter", type=str, default="", help="Apply a filter pattern to the logs.") tail_parser.add_argument( "--force-color", action="store_true", @@ -442,34 +382,23 @@ def positive_int(s): ## # Undeploy ## - undeploy_parser = subparsers.add_parser( - "undeploy", parents=[env_parser], help="Undeploy application." - ) + undeploy_parser = subparsers.add_parser("undeploy", parents=[env_parser], help="Undeploy application.") undeploy_parser.add_argument( "--remove-logs", action="store_true", - help=( - "Removes log groups of api gateway and lambda task" - " during the undeployment." - ), - ) - undeploy_parser.add_argument( - "-y", "--yes", action="store_true", help="Auto confirm yes." + help=("Removes log groups of api gateway and lambda task" " during the undeployment."), ) + undeploy_parser.add_argument("-y", "--yes", action="store_true", help="Auto confirm yes.") ## # Unschedule ## - subparsers.add_parser( - "unschedule", parents=[env_parser], help="Unschedule functions." - ) + subparsers.add_parser("unschedule", parents=[env_parser], help="Unschedule functions.") ## # Updating ## - update_parser = subparsers.add_parser( - "update", parents=[env_parser], help="Update deployed application." 
- ) + update_parser = subparsers.add_parser("update", parents=[env_parser], help="Update deployed application.") update_parser.add_argument( "-z", "--zip", @@ -664,16 +593,12 @@ def dispatch_command(self, command, stage): if not self.django_settings: print("This command is for Django projects only!") - print( - "If this is a Django project, please define django_settings in your zappa_settings." - ) + print("If this is a Django project, please define django_settings in your zappa_settings.") return command_tail = self.vargs.get("command_rest") if len(command_tail) > 1: - command = " ".join( - command_tail - ) # ex: zappa manage dev "shell --version" + command = " ".join(command_tail) # ex: zappa manage dev "shell --version" else: command = command_tail[0] # ex: zappa manage dev showmigrations admin @@ -694,9 +619,7 @@ def dispatch_command(self, command, stage): keep_open=not self.vargs["disable_keep_open"], ) elif command == "undeploy": # pragma: no cover - self.undeploy( - no_confirm=self.vargs["yes"], remove_logs=self.vargs["remove_logs"] - ) + self.undeploy(no_confirm=self.vargs["yes"], remove_logs=self.vargs["remove_logs"]) elif command == "schedule": # pragma: no cover self.schedule() elif command == "unschedule": # pragma: no cover @@ -716,11 +639,7 @@ def dispatch_command(self, command, stage): def save_python_settings_file(self, output_path=None): settings_path = output_path or "zappa_settings.py" - print( - "Generating Zappa settings Python file and saving to {}".format( - settings_path - ) - ) + print("Generating Zappa settings Python file and saving to {}".format(settings_path)) if not settings_path.endswith("zappa_settings.py"): raise ValueError("Settings file must be named zappa_settings.py") zappa_settings_s = self.get_zappa_settings_string() @@ -778,22 +697,14 @@ def template(self, lambda_arn, role_arn, output=None, json=False): ) if not output: - template_file = ( - self.lambda_name + "-template-" + str(int(time.time())) + ".json" - ) + template_file = self.lambda_name + "-template-" + str(int(time.time())) + ".json" else: template_file = output with open(template_file, "wb") as out: - out.write( - bytes(template.to_json(indent=None, separators=(",", ":")), "utf-8") - ) + out.write(bytes(template.to_json(indent=None, separators=(",", ":")), "utf-8")) if not json: - click.echo( - click.style("Template created", fg="green", bold=True) - + ": " - + click.style(template_file, bold=True) - ) + click.echo(click.style("Template created", fg="green", bold=True) + ": " + click.style(template_file, bold=True)) else: with open(template_file, "r") as out: print(out.read()) @@ -873,18 +784,14 @@ def deploy(self, source_zip=None, docker_image_uri=None): raise ClickException("Unable to upload handler to S3. Quitting.") # Copy the project zip to the current project zip - current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format( - self.api_stage, self.project_name - ) + current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format(self.api_stage, self.project_name) success = self.zappa.copy_on_s3( src_file_name=self.zip_path, dst_file_name=current_project_name, bucket_name=self.s3_bucket_name, ) if not success: # pragma: no cover - raise ClickException( - "Unable to copy the zip to be the current project. Quitting." - ) + raise ClickException("Unable to copy the zip to be the current project. 
Quitting.") handler_file = self.handler_path else: @@ -892,9 +799,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): # Fixes https://github.com/Miserlou/Zappa/issues/613 try: - self.lambda_arn = self.zappa.get_lambda_function( - function_name=self.lambda_name - ) + self.lambda_arn = self.zappa.get_lambda_function(function_name=self.lambda_name) except botocore.client.ClientError: # Register the Lambda function with that zip as the source # You'll also need to define the path to your lambda_handler code. @@ -933,9 +838,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): self.schedule() endpoint_url = "" - deployment_string = ( - click.style("Deployment complete", fg="green", bold=True) + "!" - ) + deployment_string = click.style("Deployment complete", fg="green", bold=True) + "!" if self.use_alb: kwargs = dict( @@ -949,7 +852,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): if self.use_apigateway: # Create and configure the API Gateway - template = self.zappa.create_stack_template( + self.zappa.create_stack_template( lambda_arn=self.lambda_arn, lambda_name=self.lambda_name, api_key_required=self.api_key_required, @@ -977,9 +880,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): if self.stage_config.get("payload_compression", True): self.zappa.add_api_compression( api_id=api_id, - min_compression_size=self.stage_config.get( - "payload_minimum_compression_size", 0 - ), + min_compression_size=self.stage_config.get("payload_minimum_compression_size", 0), ) # Deploy the API! @@ -991,14 +892,10 @@ def deploy(self, source_zip=None, docker_image_uri=None): if self.api_key is None: self.zappa.create_api_key(api_id=api_id, stage_name=self.api_stage) else: - self.zappa.add_api_stage_to_api_key( - api_key=self.api_key, api_id=api_id, stage_name=self.api_stage - ) + self.zappa.add_api_stage_to_api_key(api_key=self.api_key, api_id=api_id, stage_name=self.api_stage) if self.stage_config.get("touch", True): - self.zappa.wait_until_lambda_function_is_updated( - function_name=self.lambda_name - ) + self.zappa.wait_until_lambda_function_is_updated(function_name=self.lambda_name) self.touch_endpoint(endpoint_url) # Finally, delete the local copy our zip package @@ -1030,16 +927,14 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): # Temporary version check try: updated_time = 1472581018 - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) conf = function_response["Configuration"] last_updated = parser.parse(conf["LastModified"]) last_updated_unix = time.mktime(last_updated.timetuple()) except botocore.exceptions.BotoCoreError as e: click.echo(click.style(type(e).__name__, fg="red") + ": " + e.args[0]) sys.exit(-1) - except Exception as e: + except Exception: click.echo( click.style("Warning!", fg="red") + " Couldn't get function " @@ -1063,12 +958,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): try: self.zappa.create_iam_roles() except botocore.client.ClientError: - click.echo( - click.style("Failed", fg="red") - + " to " - + click.style("manage IAM roles", bold=True) - + "!" 
- ) + click.echo(click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!") click.echo( "You may " + click.style("lack the necessary AWS permissions", bold=True) @@ -1107,23 +997,17 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): disable_progress=self.disable_progress, ) if not success: # pragma: no cover - raise ClickException( - "Unable to upload handler to S3. Quitting." - ) + raise ClickException("Unable to upload handler to S3. Quitting.") # Copy the project zip to the current project zip - current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format( - self.api_stage, self.project_name - ) + current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format(self.api_stage, self.project_name) success = self.zappa.copy_on_s3( src_file_name=self.zip_path, dst_file_name=current_project_name, bucket_name=self.s3_bucket_name, ) if not success: # pragma: no cover - raise ClickException( - "Unable to copy the zip to be the current project. Quitting." - ) + raise ClickException("Unable to copy the zip to be the current project. Quitting.") handler_file = self.handler_path else: @@ -1210,9 +1094,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): if self.stage_config.get("payload_compression", True): self.zappa.add_api_compression( api_id=api_id, - min_compression_size=self.stage_config.get( - "payload_minimum_compression_size", 0 - ), + min_compression_size=self.stage_config.get("payload_minimum_compression_size", 0), ) else: self.zappa.remove_api_compression(api_id=api_id) @@ -1241,17 +1123,9 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): if self.base_path: endpoint_url += "/" + self.base_path - deployed_string = ( - "Your updated Zappa deployment is " - + click.style("live", fg="green", bold=True) - + "!" - ) + deployed_string = "Your updated Zappa deployment is " + click.style("live", fg="green", bold=True) + "!" if self.use_apigateway: - deployed_string = ( - deployed_string - + ": " - + click.style("{}".format(endpoint_url), bold=True) - ) + deployed_string = deployed_string + ": " + click.style("{}".format(endpoint_url), bold=True) api_url = None if endpoint_url and "amazonaws.com" not in endpoint_url: @@ -1261,9 +1135,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): deployed_string = deployed_string + " (" + api_url + ")" if self.stage_config.get("touch", True): - self.zappa.wait_until_lambda_function_is_updated( - function_name=self.lambda_name - ) + self.zappa.wait_until_lambda_function_is_updated(function_name=self.lambda_name) if api_url: self.touch_endpoint(api_url) elif endpoint_url: @@ -1278,9 +1150,7 @@ def rollback(self, revision): print("Rolling back..") - self.zappa.rollback_lambda_function_version( - self.lambda_name, versions_back=revision - ) + self.zappa.rollback_lambda_function_version(self.lambda_name, versions_back=revision) print("Done!") def tail( @@ -1351,9 +1221,7 @@ def undeploy(self, no_confirm=False, remove_logs=False): api_id = self.zappa.get_api_id(self.lambda_name) self.zappa.remove_api_key(api_id, self.api_stage) - gateway_id = self.zappa.undeploy_api_gateway( - self.lambda_name, domain_name=domain_name, base_path=base_path - ) + self.zappa.undeploy_api_gateway(self.lambda_name, domain_name=domain_name, base_path=base_path) self.unschedule() # removes event triggers, including warm up event. 
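
A recurring cleanup in the hunks around here is dropping unused `as e` bindings from `except` clauses (flake8 flags these as unused locals). A minimal sketch of the pattern, using a hypothetical lookup against the Lambda API rather than any function from this patch:

```
import botocore.exceptions


def get_function_config(lambda_client, function_name):
    # Bind the exception object only when the handler actually uses it;
    # otherwise a bare `except SomeError:` keeps flake8 quiet.
    try:
        return lambda_client.get_function(FunctionName=function_name)["Configuration"]
    except botocore.exceptions.ClientError:
        # e.g. the function has not been deployed yet
        return None
```
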
@@ -1373,9 +1241,7 @@ def update_cognito_triggers(self): lambda_configs = set() for trigger in triggers: lambda_configs.add(trigger["source"].split("_")[0]) - self.zappa.update_cognito( - self.lambda_name, user_pool, lambda_configs, self.lambda_arn - ) + self.zappa.update_cognito(self.lambda_name, user_pool, lambda_configs, self.lambda_arn) def schedule(self): """ @@ -1396,9 +1262,7 @@ def schedule(self): if not events: events = [] - keep_warm_rate = self.stage_config.get( - "keep_warm_expression", "rate(4 minutes)" - ) + keep_warm_rate = self.stage_config.get("keep_warm_expression", "rate(4 minutes)") events.append( { "name": "zappa-keep-warm", @@ -1410,10 +1274,8 @@ def schedule(self): if events: try: - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) - except botocore.exceptions.ClientError as e: # pragma: no cover + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) + except botocore.exceptions.ClientError: # pragma: no cover click.echo( click.style("Function does not exist", fg="yellow") + ", please " @@ -1431,15 +1293,9 @@ def schedule(self): ) # Add async tasks SNS - if self.stage_config.get( - "async_source", None - ) == "sns" and self.stage_config.get("async_resources", True): - self.lambda_arn = self.zappa.get_lambda_function( - function_name=self.lambda_name - ) - topic_arn = self.zappa.create_async_sns_topic( - lambda_name=self.lambda_name, lambda_arn=self.lambda_arn - ) + if self.stage_config.get("async_source", None) == "sns" and self.stage_config.get("async_resources", True): + self.lambda_arn = self.zappa.get_lambda_function(function_name=self.lambda_name) + topic_arn = self.zappa.create_async_sns_topic(lambda_name=self.lambda_name, lambda_arn=self.lambda_arn) click.echo("SNS Topic created: %s" % topic_arn) # Add async tasks DynamoDB @@ -1447,25 +1303,19 @@ def schedule(self): read_capacity = self.stage_config.get("async_response_table_read_capacity", 1) write_capacity = self.stage_config.get("async_response_table_write_capacity", 1) if table_name and self.stage_config.get("async_resources", True): - created, response_table = self.zappa.create_async_dynamodb_table( - table_name, read_capacity, write_capacity - ) + created, response_table = self.zappa.create_async_dynamodb_table(table_name, read_capacity, write_capacity) if created: click.echo("DynamoDB table created: %s" % table_name) else: click.echo("DynamoDB table exists: %s" % table_name) - provisioned_throughput = response_table["Table"][ - "ProvisionedThroughput" - ] + provisioned_throughput = response_table["Table"]["ProvisionedThroughput"] if ( provisioned_throughput["ReadCapacityUnits"] != read_capacity or provisioned_throughput["WriteCapacityUnits"] != write_capacity ): click.echo( click.style( - "\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format( - table_name - ), + "\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format(table_name), fg="red", ) ) @@ -1485,16 +1335,12 @@ def unschedule(self): function_arn = None try: - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) function_arn = function_response["Configuration"]["FunctionArn"] - except botocore.exceptions.ClientError as e: # pragma: no cover + except botocore.exceptions.ClientError: # pragma: no cover raise ClickException( "Function does not exist, you should deploy first. 
Ex: zappa deploy {}. " - "Proceeding to unschedule CloudWatch based events.".format( - self.api_stage - ) + "Proceeding to unschedule CloudWatch based events.".format(self.api_stage) ) print("Unscheduling..") @@ -1505,9 +1351,7 @@ def unschedule(self): ) # Remove async task SNS - if self.stage_config.get( - "async_source", None - ) == "sns" and self.stage_config.get("async_resources", True): + if self.stage_config.get("async_source", None) == "sns" and self.stage_config.get("async_resources", True): removed_arns = self.zappa.remove_async_sns_topic(self.lambda_name) click.echo("SNS Topic removed: %s" % ", ".join(removed_arns)) @@ -1540,11 +1384,7 @@ def invoke(self, function_name, raw_python=False, command=None, no_color=False): # For a successful request FunctionError is not in response. # https://github.com/Miserlou/Zappa/pull/1254/ if "FunctionError" in response: - raise ClickException( - "{} error occurred while invoking command.".format( - response["FunctionError"] - ) - ) + raise ClickException("{} error occurred while invoking command.".format(response["FunctionError"])) def format_lambda_response(self, response, colorize=True): if "LogResult" in response: @@ -1602,9 +1442,7 @@ def colorize_invoke_command(self, string): if token == "[DEBUG]": format_string = "{}" pattern = re.escape(token) - repl = click.style( - format_string.format(token), bold=True, fg="cyan" - ) + repl = click.style(format_string.format(token), bold=True, fg="cyan") final_string = re.sub(pattern.format(token), repl, final_string) except Exception: # pragma: no cover pass @@ -1621,9 +1459,7 @@ def colorize_invoke_command(self, string): "Max Memory Used:", ]: if token in final_string: - final_string = final_string.replace( - token, click.style(token, bold=True, fg="green") - ) + final_string = final_string.replace(token, click.style(token, bold=True, fg="green")) except Exception: # pragma: no cover pass @@ -1631,9 +1467,7 @@ def colorize_invoke_command(self, string): for token in final_string.replace("\t", " ").split(" "): try: if token.count("-") == 4 and token.replace("-", "").isalnum(): - final_string = final_string.replace( - token, click.style(token, fg="magenta") - ) + final_string = final_string.replace(token, click.style(token, fg="magenta")) except Exception: # pragma: no cover pass @@ -1650,9 +1484,7 @@ def tabular_print(title, value): """ Convenience function for priting formatted table items. """ - click.echo( - "%-*s%s" % (32, click.style("\t" + title, fg="green") + ":", str(value)) - ) + click.echo("%-*s%s" % (32, click.style("\t" + title, fg="green") + ":", str(value))) return # Lambda Env Details @@ -1661,17 +1493,14 @@ def tabular_print(title, value): if not lambda_versions: raise ClickException( click.style( - "No Lambda %s detected in %s - have you deployed yet?" - % (self.lambda_name, self.zappa.aws_region), + "No Lambda %s detected in %s - have you deployed yet?" 
% (self.lambda_name, self.zappa.aws_region), fg="red", ) ) status_dict = collections.OrderedDict() status_dict["Lambda Versions"] = len(lambda_versions) - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) conf = function_response["Configuration"] self.lambda_arn = conf["FunctionArn"] status_dict["Lambda Name"] = self.lambda_name @@ -1687,9 +1516,7 @@ def tabular_print(title, value): status_dict["Lambda Handler"] = conf.get("Handler", "") status_dict["Lambda Runtime"] = conf.get("Runtime", "") if "VpcConfig" in conf.keys(): - status_dict["Lambda VPC ID"] = conf.get("VpcConfig", {}).get( - "VpcId", "Not assigned" - ) + status_dict["Lambda VPC ID"] = conf.get("VpcConfig", {}).get("VpcId", "Not assigned") else: status_dict["Lambda VPC ID"] = None @@ -1702,11 +1529,9 @@ def tabular_print(title, value): EndTime=datetime.utcnow(), Period=1440, Statistics=["Sum"], - Dimensions=[ - {"Name": "FunctionName", "Value": "{}".format(self.lambda_name)} - ], + Dimensions=[{"Name": "FunctionName", "Value": "{}".format(self.lambda_name)}], )["Datapoints"][0]["Sum"] - except Exception as e: + except Exception: function_invocations = 0 try: function_errors = self.zappa.cloudwatch.get_metric_statistics( @@ -1716,16 +1541,14 @@ def tabular_print(title, value): EndTime=datetime.utcnow(), Period=1440, Statistics=["Sum"], - Dimensions=[ - {"Name": "FunctionName", "Value": "{}".format(self.lambda_name)} - ], + Dimensions=[{"Name": "FunctionName", "Value": "{}".format(self.lambda_name)}], )["Datapoints"][0]["Sum"] - except Exception as e: + except Exception: function_errors = 0 try: error_rate = "{0:.2f}%".format(function_errors / function_invocations * 100) - except: + except Exception: error_rate = "Error calculating" status_dict["Invocations (24h)"] = int(function_invocations) status_dict["Errors (24h)"] = int(function_errors) @@ -1798,11 +1621,7 @@ def check_stage_name(self, stage_name): return True if self.stage_name_env_pattern.match(stage_name): return True - raise ValueError( - "API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format( - stage_name - ) - ) + raise ValueError("API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format(stage_name)) def check_environment(self, environment): """ @@ -1812,14 +1631,10 @@ def check_environment(self, environment): non_strings = [] for (k, v) in environment.items(): - if not isinstance(v, basestring): + if not isinstance(v, str): non_strings.append(k) if non_strings: - raise ValueError( - "The following environment variables are not strings: {}".format( - ", ".join(non_strings) - ) - ) + raise ValueError("The following environment variables are not strings: {}".format(", ".join(non_strings))) else: return True @@ -1836,9 +1651,7 @@ def init(self, settings_file="zappa_settings.json"): # Ensure that we don't already have a zappa_settings file. if os.path.isfile(settings_file): raise ClickException( - "This project already has a " - + click.style("{0!s} file".format(settings_file), fg="red", bold=True) - + "!" + "This project already has a " + click.style("{0!s} file".format(settings_file), fg="red", bold=True) + "!" ) # Explain system. 
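
The `check_environment` hunk above swaps the Python 2 era `basestring` test (from `past.builtins`) for a plain `isinstance(v, str)` check. A standalone sketch of the same validation, with hypothetical names, for readers skimming the diff:

```
def check_environment(environment):
    # Lambda environment variable values must be plain strings.
    non_strings = [k for k, v in environment.items() if not isinstance(v, str)]
    if non_strings:
        raise ValueError("The following environment variables are not strings: {}".format(", ".join(non_strings)))
    return True


# Example: check_environment({"DEBUG": "1", "TIMEOUT": 30}) raises ValueError because of "TIMEOUT".
```
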
@@ -1856,18 +1669,13 @@ def init(self, settings_file="zappa_settings.json"): ) click.echo( - click.style("Welcome to ", bold=True) - + click.style("Zappa", fg="green", bold=True) - + click.style("!\n", bold=True) + click.style("Welcome to ", bold=True) + click.style("Zappa", fg="green", bold=True) + click.style("!\n", bold=True) ) click.echo( - click.style("Zappa", bold=True) - + " is a system for running server-less Python web applications" + click.style("Zappa", bold=True) + " is a system for running server-less Python web applications" " on AWS Lambda and AWS API Gateway." ) - click.echo( - "This `init` command will help you create and configure your new Zappa deployment." - ) + click.echo("This `init` command will help you create and configure your new Zappa deployment.") click.echo("Let's get started!\n") # Create Env @@ -1881,10 +1689,7 @@ def init(self, settings_file="zappa_settings.json"): + click.style("production", bold=True) + "'." ) - env = ( - input("What do you want to call this environment (default 'dev'): ") - or "dev" - ) + env = input("What do you want to call this environment (default 'dev'): ") or "dev" try: self.check_stage_name(env) break @@ -1892,7 +1697,8 @@ def init(self, settings_file="zappa_settings.json"): click.echo(click.style("Stage names must match a-zA-Z0-9_", fg="red")) # Detect AWS profiles and regions - # If anyone knows a more straightforward way to easily detect and parse AWS profiles I'm happy to change this, feels like a hack + # If anyone knows a more straightforward way to easily detect and + # parse AWS profiles I'm happy to change this, feels like a hack session = botocore.session.Session() config = session.full_config profiles = config.get("profiles", {}) @@ -1906,16 +1712,15 @@ def init(self, settings_file="zappa_settings.json"): if not profile_names: profile_name, profile = None, None click.echo( - "We couldn't find an AWS profile to use. Before using Zappa, you'll need to set one up. See here for more info: {}".format( + "We couldn't find an AWS profile to use. " + "Before using Zappa, you'll need to set one up. See here for more info: {}".format( click.style(BOTO3_CONFIG_DOCS_URL, fg="blue", underline=True) ) ) elif len(profile_names) == 1: profile_name = profile_names[0] profile = profiles[profile_name] - click.echo( - "Okay, using profile {}!".format(click.style(profile_name, bold=True)) - ) + click.echo("Okay, using profile {}!".format(click.style(profile_name, bold=True))) else: if "default" in profile_names: default_profile = [p for p in profile_names if p == "default"][0] @@ -1944,22 +1749,12 @@ def init(self, settings_file="zappa_settings.json"): # Create Bucket click.echo( - "\nYour Zappa deployments will need to be uploaded to a " - + click.style("private S3 bucket", bold=True) - + "." + "\nYour Zappa deployments will need to be uploaded to a " + click.style("private S3 bucket", bold=True) + "." ) click.echo("If you don't have a bucket yet, we'll create one for you too.") - default_bucket = "zappa-" + "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(9) - ) + default_bucket = "zappa-" + "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)) while True: - bucket = ( - input( - "What do you want to call your bucket? (default '%s'): " - % default_bucket - ) - or default_bucket - ) + bucket = input("What do you want to call your bucket? 
(default '%s'): " % default_bucket) or default_bucket if is_valid_bucket_name(bucket): break @@ -1985,32 +1780,24 @@ def init(self, settings_file="zappa_settings.json"): # Detect Django/Flask try: # pragma: no cover - import django + import django # noqa: F401 has_django = True - except ImportError as e: + except ImportError: has_django = False try: # pragma: no cover - import flask + import flask # noqa: F401 has_flask = True - except ImportError as e: + except ImportError: has_flask = False print("") # App-specific if has_django: # pragma: no cover - click.echo( - "It looks like this is a " - + click.style("Django", bold=True) - + " application!" - ) - click.echo( - "What is the " - + click.style("module path", bold=True) - + " to your projects's Django settings?" - ) + click.echo("It looks like this is a " + click.style("Django", bold=True) + " application!") + click.echo("What is the " + click.style("module path", bold=True) + " to your projects's Django settings?") django_settings = None matches = detect_django_settings() @@ -2023,34 +1810,18 @@ def init(self, settings_file="zappa_settings.json"): bold=True, ) ) - django_settings = ( - input( - "Where are your project's settings? (default '%s'): " - % matches[0] - ) - or matches[0] - ) + django_settings = input("Where are your project's settings? (default '%s'): " % matches[0]) or matches[0] else: - click.echo( - "(This will likely be something like 'your_project.settings')" - ) + click.echo("(This will likely be something like 'your_project.settings')") django_settings = input("Where are your project's settings?: ") django_settings = django_settings.replace("'", "") django_settings = django_settings.replace('"', "") else: matches = None if has_flask: - click.echo( - "It looks like this is a " - + click.style("Flask", bold=True) - + " application." - ) + click.echo("It looks like this is a " + click.style("Flask", bold=True) + " application.") matches = detect_flask_apps() - click.echo( - "What's the " - + click.style("modular path", bold=True) - + " to your app's function?" - ) + click.echo("What's the " + click.style("modular path", bold=True) + " to your app's function?") click.echo("This will likely be something like 'your_module.app'.") app_function = None while app_function in [None, ""]: @@ -2062,13 +1833,7 @@ def init(self, settings_file="zappa_settings.json"): bold=True, ) ) - app_function = ( - input( - "Where is your app's function? (default '%s'): " - % matches[0] - ) - or matches[0] - ) + app_function = input("Where is your app's function? (default '%s'): " % matches[0]) or matches[0] else: app_function = input("Where is your app's function?: ") app_function = app_function.replace("'", "") @@ -2085,9 +1850,7 @@ def init(self, settings_file="zappa_settings.json"): + click.style("all available regions", bold=True) + " in order to provide fast global service." ) - click.echo( - "If you are using Zappa for the first time, you probably don't want to do this!" 
- ) + click.echo("If you are using Zappa for the first time, you probably don't want to do this!") global_deployment = False while True: global_type = input( @@ -2138,27 +1901,12 @@ def init(self, settings_file="zappa_settings.json"): zappa_settings_json = json.dumps(zappa_settings, sort_keys=True, indent=4) - click.echo( - "\nOkay, here's your " - + click.style("zappa_settings.json", bold=True) - + ":\n" - ) + click.echo("\nOkay, here's your " + click.style("zappa_settings.json", bold=True) + ":\n") click.echo(click.style(zappa_settings_json, fg="yellow", bold=False)) - confirm = ( - input( - "\nDoes this look " - + click.style("okay", bold=True, fg="green") - + "? (default 'y') [y/n]: " - ) - or "yes" - ) + confirm = input("\nDoes this look " + click.style("okay", bold=True, fg="green") + "? (default 'y') [y/n]: ") or "yes" if confirm[0] not in ["y", "Y", "yes", "YES"]: - click.echo( - "" - + click.style("Sorry", bold=True, fg="red") - + " to hear that! Please init again." - ) + click.echo("" + click.style("Sorry", bold=True, fg="red") + " to hear that! Please init again.") return # Write @@ -2175,11 +1923,7 @@ def init(self, settings_file="zappa_settings.json"): ) click.echo(click.style("\t$ zappa deploy --all", bold=True)) - click.echo( - "\nAfter that, you can " - + click.style("update", bold=True) - + " your application code with:\n" - ) + click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n") click.echo(click.style("\t$ zappa update --all", bold=True)) else: click.echo( @@ -2191,11 +1935,7 @@ def init(self, settings_file="zappa_settings.json"): ) click.echo(click.style("\t$ zappa deploy %s" % env, bold=True)) - click.echo( - "\nAfter that, you can " - + click.style("update", bold=True) - + " your application code with:\n" - ) + click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n") click.echo(click.style("\t$ zappa update %s" % env, bold=True)) click.echo( @@ -2222,9 +1962,7 @@ def certify(self, no_confirm=True, manual=False): if not self.domain: raise ClickException( - "Can't certify a domain without " - + click.style("domain", fg="red", bold=True) - + " configured!" + "Can't certify a domain without " + click.style("domain", fg="red", bold=True) + " configured!" ) if not no_confirm: # pragma: no cover @@ -2273,15 +2011,11 @@ def certify(self, no_confirm=True, manual=False): if account_key_location.startswith("s3://"): bucket, key_name = parse_s3_url(account_key_location) - self.zappa.s3_client.download_file( - bucket, key_name, os.path.join(gettempdir(), "account.key") - ) + self.zappa.s3_client.download_file(bucket, key_name, os.path.join(gettempdir(), "account.key")) else: from shutil import copyfile - copyfile( - account_key_location, os.path.join(gettempdir(), "account.key") - ) + copyfile(account_key_location, os.path.join(gettempdir(), "account.key")) # Prepare for Custom SSL elif not account_key_location and not cert_arn: @@ -2306,11 +2040,7 @@ def certify(self, no_confirm=True, manual=False): with open(cert_chain_location) as f: certificate_chain = f.read() - click.echo( - "Certifying domain " - + click.style(self.domain, fg="green", bold=True) - + ".." - ) + click.echo("Certifying domain " + click.style(self.domain, fg="green", bold=True) + "..") # Get cert and update domain. 
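
Just above, `certify` stages the Let's Encrypt account key in the temp directory, downloading it from S3 when the configured location is an `s3://` URL and copying it from disk otherwise. A minimal sketch of that branch, assuming a boto3 S3 client and a `parse_s3_url` helper like the one this module uses:

```
import os
from shutil import copyfile
from tempfile import gettempdir


def stage_account_key(account_key_location, s3_client, parse_s3_url):
    # Place the account key where the certificate routines expect to find it.
    destination = os.path.join(gettempdir(), "account.key")
    if account_key_location.startswith("s3://"):
        bucket, key_name = parse_s3_url(account_key_location)
        s3_client.download_file(bucket, key_name, destination)
    else:
        copyfile(account_key_location, destination)
    return destination
```
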
@@ -2318,9 +2048,7 @@ def certify(self, no_confirm=True, manual=False): if not cert_location and not cert_arn: from .letsencrypt import get_cert_and_update_domain - cert_success = get_cert_and_update_domain( - self.zappa, self.lambda_name, self.api_stage, self.domain, manual - ) + cert_success = get_cert_and_update_domain(self.zappa, self.lambda_name, self.api_stage, self.domain, manual) # Custom SSL / ACM else: @@ -2340,7 +2068,8 @@ def certify(self, no_confirm=True, manual=False): if route53: self.zappa.update_route53_records(self.domain, dns_name) print( - "Created a new domain name with supplied certificate. Please note that it can take up to 40 minutes for this domain to be " + "Created a new domain name with supplied certificate. " + "Please note that it can take up to 40 minutes for this domain to be " "created and propagated through AWS, but it requires no further work on your part." ) else: @@ -2360,14 +2089,9 @@ def certify(self, no_confirm=True, manual=False): cert_success = True if cert_success: - click.echo( - "Certificate " + click.style("updated", fg="green", bold=True) + "!" - ) + click.echo("Certificate " + click.style("updated", fg="green", bold=True) + "!") else: - click.echo( - click.style("Failed", fg="red", bold=True) - + " to generate or install certificate! :(" - ) + click.echo(click.style("Failed", fg="red", bold=True) + " to generate or install certificate! :(") click.echo("\n==============\n") shamelessly_promote() @@ -2415,9 +2139,7 @@ def callback(self, position): working_dir = os.getcwd() working_dir_importer = pkgutil.get_importer(working_dir) - module_ = working_dir_importer.find_module(mod_name).load_module( - mod_name - ) + module_ = working_dir_importer.find_module(mod_name).load_module(mod_name) except (ImportError, AttributeError): @@ -2431,21 +2153,15 @@ def callback(self, position): "import {position} callback ".format(position=position), bold=True, ) - + 'module: "{mod_path}"'.format( - mod_path=click.style(mod_path, bold=True) - ) + + 'module: "{mod_path}"'.format(mod_path=click.style(mod_path, bold=True)) ) if not hasattr(module_, cb_func_name): # pragma: no cover raise ClickException( click.style("Failed ", fg="red") + "to " - + click.style( - "find {position} callback ".format(position=position), bold=True - ) - + 'function: "{cb_func_name}" '.format( - cb_func_name=click.style(cb_func_name, bold=True) - ) + + click.style("find {position} callback ".format(position=position), bold=True) + + 'function: "{cb_func_name}" '.format(cb_func_name=click.style(cb_func_name, bold=True)) + 'in module "{mod_path}"'.format(mod_path=mod_path) ) @@ -2466,10 +2182,7 @@ def check_for_update(self): + click.style("Zappa", bold=True) + " is available!" ) - click.echo( - "Upgrade with: " - + click.style("pip install zappa --upgrade", bold=True) - ) + click.echo("Upgrade with: " + click.style("pip install zappa --upgrade", bold=True)) click.echo( "Visit the project page on GitHub to see the latest changes: " + click.style("https://github.com/Zappa/Zappa", bold=True) @@ -2496,11 +2209,7 @@ def load_settings(self, settings_file=None, session=None): # Make sure that this stage is our settings if self.api_stage not in self.zappa_settings.keys(): - raise ClickException( - "Please define stage '{0!s}' in your Zappa settings.".format( - self.api_stage - ) - ) + raise ClickException("Please define stage '{0!s}' in your Zappa settings.".format(self.api_stage)) # We need a working title for this project. Use one if supplied, else cwd dirname. 
if "project_name" in self.stage_config: # pragma: no cover @@ -2519,10 +2228,7 @@ def load_settings(self, settings_file=None, session=None): # Load stage-specific settings self.s3_bucket_name = self.stage_config.get( "s3_bucket", - "zappa-" - + "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(9) - ), + "zappa-" + "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)), ) self.vpc_config = self.stage_config.get("vpc_config", {}) self.memory_size = self.stage_config.get("memory_size", 512) @@ -2537,27 +2243,18 @@ def load_settings(self, settings_file=None, session=None): self.base_path = self.stage_config.get("base_path", None) self.timeout_seconds = self.stage_config.get("timeout_seconds", 30) dead_letter_arn = self.stage_config.get("dead_letter_arn", "") - self.dead_letter_config = ( - {"TargetArn": dead_letter_arn} if dead_letter_arn else {} - ) + self.dead_letter_config = {"TargetArn": dead_letter_arn} if dead_letter_arn else {} self.cognito = self.stage_config.get("cognito", None) - self.num_retained_versions = self.stage_config.get( - "num_retained_versions", None - ) + self.num_retained_versions = self.stage_config.get("num_retained_versions", None) # Check for valid values of num_retained_versions - if ( - self.num_retained_versions is not None - and type(self.num_retained_versions) is not int - ): + if self.num_retained_versions is not None and type(self.num_retained_versions) is not int: raise ClickException( "Please supply either an integer or null for num_retained_versions in the zappa_settings.json. Found %s" % type(self.num_retained_versions) ) elif type(self.num_retained_versions) is int and self.num_retained_versions < 1: - raise ClickException( - "The value for num_retained_versions in the zappa_settings.json should be greater than 0." - ) + raise ClickException("The value for num_retained_versions in the zappa_settings.json should be greater than 0.") # Provide legacy support for `use_apigateway`, now `apigateway_enabled`. # https://github.com/Miserlou/Zappa/issues/490 @@ -2565,13 +2262,9 @@ def load_settings(self, settings_file=None, session=None): self.use_apigateway = self.stage_config.get("use_apigateway", True) if self.use_apigateway: self.use_apigateway = self.stage_config.get("apigateway_enabled", True) - self.apigateway_description = self.stage_config.get( - "apigateway_description", None - ) + self.apigateway_description = self.stage_config.get("apigateway_description", None) - self.lambda_handler = self.stage_config.get( - "lambda_handler", "handler.lambda_handler" - ) + self.lambda_handler = self.stage_config.get("lambda_handler", "handler.lambda_handler") # DEPRECATED. 
https://github.com/Miserlou/Zappa/issues/456 self.remote_env_bucket = self.stage_config.get("remote_env_bucket", None) self.remote_env_file = self.stage_config.get("remote_env_file", None) @@ -2582,28 +2275,18 @@ def load_settings(self, settings_file=None, session=None): self.binary_support = self.stage_config.get("binary_support", True) self.api_key_required = self.stage_config.get("api_key_required", False) self.api_key = self.stage_config.get("api_key") - self.endpoint_configuration = self.stage_config.get( - "endpoint_configuration", None - ) + self.endpoint_configuration = self.stage_config.get("endpoint_configuration", None) self.iam_authorization = self.stage_config.get("iam_authorization", False) self.cors = self.stage_config.get("cors", False) - self.lambda_description = self.stage_config.get( - "lambda_description", "Zappa Deployment" - ) + self.lambda_description = self.stage_config.get("lambda_description", "Zappa Deployment") self.lambda_concurrency = self.stage_config.get("lambda_concurrency", None) self.environment_variables = self.stage_config.get("environment_variables", {}) - self.aws_environment_variables = self.stage_config.get( - "aws_environment_variables", {} - ) + self.aws_environment_variables = self.stage_config.get("aws_environment_variables", {}) self.check_environment(self.environment_variables) self.authorizer = self.stage_config.get("authorizer", {}) - self.runtime = self.stage_config.get( - "runtime", get_runtime_from_python_version() - ) + self.runtime = self.stage_config.get("runtime", get_runtime_from_python_version()) self.aws_kms_key_arn = self.stage_config.get("aws_kms_key_arn", "") - self.context_header_mappings = self.stage_config.get( - "context_header_mappings", {} - ) + self.context_header_mappings = self.stage_config.get("context_header_mappings", {}) self.xray_tracing = self.stage_config.get("xray_tracing", False) self.desired_role_arn = self.stage_config.get("role_arn") self.layers = self.stage_config.get("layers", None) @@ -2666,9 +2349,7 @@ def get_json_or_yaml_settings(self, settings_name="zappa_settings"): and not os.path.isfile(zs_yaml) and not os.path.isfile(zs_toml) ): - raise ClickException( - "Please configure a zappa_settings file or call `zappa init`." - ) + raise ClickException("Please configure a zappa_settings file or call `zappa init`.") # Prefer JSON if os.path.isfile(zs_json): @@ -2690,9 +2371,7 @@ def load_settings_file(self, settings_file=None): if not settings_file: settings_file = self.get_json_or_yaml_settings() if not os.path.isfile(settings_file): - raise ClickException( - "Please configure your zappa_settings file or call `zappa init`." - ) + raise ClickException("Please configure your zappa_settings file or call `zappa init`.") path, ext = os.path.splitext(settings_file) if ext == ".yml" or ext == ".yaml": @@ -2700,27 +2379,21 @@ def load_settings_file(self, settings_file=None): try: self.zappa_settings = yaml.safe_load(yaml_file) except ValueError: # pragma: no cover - raise ValueError( - "Unable to load the Zappa settings YAML. It may be malformed." - ) + raise ValueError("Unable to load the Zappa settings YAML. It may be malformed.") elif ext == ".toml": with open(settings_file) as toml_file: try: self.zappa_settings = toml.load(toml_file) except ValueError: # pragma: no cover - raise ValueError( - "Unable to load the Zappa settings TOML. It may be malformed." - ) + raise ValueError("Unable to load the Zappa settings TOML. 
It may be malformed.") else: with open(settings_file) as json_file: try: self.zappa_settings = json.load(json_file) except ValueError: # pragma: no cover - raise ValueError( - "Unable to load the Zappa settings JSON. It may be malformed." - ) + raise ValueError("Unable to load the Zappa settings JSON. It may be malformed.") - def create_package(self, output=None): + def create_package(self, output=None, use_zappa_release: Optional[str] = None): """ Ensure that the package can be properly configured, and then create it. @@ -2729,12 +2402,8 @@ def create_package(self, output=None): # Create the Lambda zip package (includes project and virtualenvironment) # Also define the path the handler file so it can be copied to the zip # root for Lambda. - current_file = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe())) - ) - handler_file = ( - os.sep.join(current_file.split(os.sep)[0:]) + os.sep + "handler.py" - ) + current_file = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + handler_file = os.sep.join(current_file.split(os.sep)[0:]) + os.sep + "handler.py" # Create the zip file(s) if self.stage_config.get("slim_handler", False): @@ -2742,9 +2411,7 @@ def create_package(self, output=None): # https://github.com/Miserlou/Zappa/issues/510 self.zip_path = self.zappa.create_lambda_zip( prefix=self.lambda_name, - use_precompiled_packages=self.stage_config.get( - "use_precompiled_packages", True - ), + use_precompiled_packages=self.stage_config.get("use_precompiled_packages", True), exclude=self.stage_config.get("exclude", []), exclude_glob=self.stage_config.get("exclude_glob", []), disable_progress=self.disable_progress, @@ -2757,7 +2424,7 @@ def create_package(self, output=None): exclude.append(cur_venv.split("/")[-1]) self.handler_path = self.zappa.create_lambda_zip( prefix="handler_{0!s}".format(self.lambda_name), - venv=self.zappa.create_handler_venv(), + venv=self.zappa.create_handler_venv(use_zappa_release=use_zappa_release), handler_file=handler_file, slim_handler=True, exclude=exclude, @@ -2766,18 +2433,13 @@ def create_package(self, output=None): disable_progress=self.disable_progress, ) else: - # This could be python3.6 optimized. - exclude = self.stage_config.get( - "exclude", ["boto3", "dateutil", "botocore", "s3transfer", "concurrent"] - ) + exclude = self.stage_config.get("exclude", ["boto3", "dateutil", "botocore", "s3transfer", "concurrent"]) # Create a single zip that has the handler and application self.zip_path = self.zappa.create_lambda_zip( prefix=self.lambda_name, handler_file=handler_file, - use_precompiled_packages=self.stage_config.get( - "use_precompiled_packages", True - ), + use_precompiled_packages=self.stage_config.get("use_precompiled_packages", True), exclude=exclude, exclude_glob=self.stage_config.get("exclude_glob", []), output=output, @@ -2831,12 +2493,7 @@ def get_zappa_settings_string(self): + "`." ) app_module, app_function = self.app_function.rsplit(".", 1) - settings_s = ( - settings_s - + "APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format( - app_module, app_function - ) - ) + settings_s = settings_s + "APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format(app_module, app_function) if self.exception_handler: settings_s += "EXCEPTION_HANDLER='{0!s}'\n".format(self.exception_handler) @@ -2876,9 +2533,7 @@ def get_zappa_settings_string(self): settings_s = settings_s + "REMOTE_ENV='{0!s}'\n".format(self.remote_env) # DEPRECATED. 
use remove_env instead elif self.remote_env_bucket and self.remote_env_file: - settings_s = settings_s + "REMOTE_ENV='s3://{0!s}/{1!s}'\n".format( - self.remote_env_bucket, self.remote_env_file - ) + settings_s = settings_s + "REMOTE_ENV='s3://{0!s}/{1!s}'\n".format(self.remote_env_bucket, self.remote_env_file) # Local envs env_dict = {} @@ -2890,9 +2545,7 @@ def get_zappa_settings_string(self): # https://github.com/Miserlou/Zappa/issues/604 # https://github.com/Miserlou/Zappa/issues/998 try: - env_dict = dict( - (k.encode("ascii").decode("ascii"), v) for (k, v) in env_dict.items() - ) + env_dict = dict((k.encode("ascii").decode("ascii"), v) for (k, v) in env_dict.items()) except Exception: raise ValueError("Environment variable keys must be ascii.") @@ -2903,25 +2556,19 @@ def get_zappa_settings_string(self): settings_s = settings_s + "PROJECT_NAME='{0!s}'\n".format((self.project_name)) if self.settings_file: - settings_s = settings_s + "SETTINGS_FILE='{0!s}'\n".format( - (self.settings_file) - ) + settings_s = settings_s + "SETTINGS_FILE='{0!s}'\n".format((self.settings_file)) else: settings_s = settings_s + "SETTINGS_FILE=None\n" if self.django_settings: - settings_s = settings_s + "DJANGO_SETTINGS='{0!s}'\n".format( - (self.django_settings) - ) + settings_s = settings_s + "DJANGO_SETTINGS='{0!s}'\n".format((self.django_settings)) else: settings_s = settings_s + "DJANGO_SETTINGS=None\n" # If slim handler, path to project zip if self.stage_config.get("slim_handler", False): - settings_s += ( - "ARCHIVE_PATH='s3://{0!s}/{1!s}_{2!s}_current_project.tar.gz'\n".format( - self.s3_bucket_name, self.api_stage, self.project_name - ) + settings_s += "ARCHIVE_PATH='s3://{0!s}/{1!s}_{2!s}_current_project.tar.gz'\n".format( + self.s3_bucket_name, self.api_stage, self.project_name ) # since includes are for slim handler add the setting here by joining arbitrary list from zappa_settings file @@ -2952,13 +2599,9 @@ def get_zappa_settings_string(self): invocation_source = event_source.get("invocation_source") function = bot_event.get("function") if intent and invocation_source and function: - bot_events_mapping[ - str(intent) + ":" + str(invocation_source) - ] = function + bot_events_mapping[str(intent) + ":" + str(invocation_source)] = function - settings_s = settings_s + "AWS_BOT_EVENT_MAPPING={0!s}\n".format( - bot_events_mapping - ) + settings_s = settings_s + "AWS_BOT_EVENT_MAPPING={0!s}\n".format(bot_events_mapping) # Map cognito triggers cognito_trigger_mapping = {} @@ -2969,9 +2612,7 @@ def get_zappa_settings_string(self): function = trigger.get("function") if source and function: cognito_trigger_mapping[source] = function - settings_s = settings_s + "COGNITO_TRIGGER_MAPPING={0!s}\n".format( - cognito_trigger_mapping - ) + settings_s = settings_s + "COGNITO_TRIGGER_MAPPING={0!s}\n".format(cognito_trigger_mapping) # Authorizer config authorizer_function = self.authorizer.get("function", None) @@ -2994,7 +2635,7 @@ def remove_local_zip(self): os.remove(self.zip_path) if self.handler_path and os.path.isfile(self.handler_path): os.remove(self.handler_path) - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover sys.exit(-1) def remove_uploaded_zip(self): @@ -3021,9 +2662,7 @@ def on_exit(self): self.remove_local_zip() - def print_logs( - self, logs, colorize=True, http=False, non_http=False, force_colorize=None - ): + def print_logs(self, logs, colorize=True, http=False, non_http=False, force_colorize=None): """ Parse, filter and print logs to the console. 
""" @@ -3111,54 +2750,40 @@ def colorize_log_entry(self, string): if token in ["CRITICAL", "ERROR", "WARNING", "DEBUG", "INFO", "NOTSET"]: final_string = final_string.replace( "[" + token + "]", - click.style("[", fg="cyan") - + click.style(token, fg="cyan", bold=True) - + click.style("]", fg="cyan"), + click.style("[", fg="cyan") + click.style(token, fg="cyan", bold=True) + click.style("]", fg="cyan"), ) else: final_string = final_string.replace( "[" + token + "]", - click.style("[", fg="cyan") - + click.style(token, bold=True) - + click.style("]", fg="cyan"), + click.style("[", fg="cyan") + click.style(token, bold=True) + click.style("]", fg="cyan"), ) # Then do quoted strings quotes = re.findall(r'"[^"]*"', string) for token in quotes: - final_string = final_string.replace( - token, click.style(token, fg="yellow") - ) + final_string = final_string.replace(token, click.style(token, fg="yellow")) # And UUIDs for token in final_string.replace("\t", " ").split(" "): try: if token.count("-") == 4 and token.replace("-", "").isalnum(): - final_string = final_string.replace( - token, click.style(token, fg="magenta") - ) + final_string = final_string.replace(token, click.style(token, fg="magenta")) except Exception: # pragma: no cover pass # And IP addresses try: if token.count(".") == 3 and token.replace(".", "").isnumeric(): - final_string = final_string.replace( - token, click.style(token, fg="red") - ) + final_string = final_string.replace(token, click.style(token, fg="red")) except Exception: # pragma: no cover pass # And status codes try: if token in ["200"]: - final_string = final_string.replace( - token, click.style(token, fg="green") - ) + final_string = final_string.replace(token, click.style(token, fg="green")) if token in ["400", "401", "403", "404", "405", "500"]: - final_string = final_string.replace( - token, click.style(token, fg="red") - ) + final_string = final_string.replace(token, click.style(token, fg="red")) except Exception: # pragma: no cover pass @@ -3175,10 +2800,7 @@ def colorize_log_entry(self, string): # And dates for token in final_string.split("\t"): try: - is_date = parser.parse(token) - final_string = final_string.replace( - token, click.style(token, fg="green") - ) + final_string = final_string.replace(token, click.style(token, fg="green")) except Exception: # pragma: no cover pass @@ -3186,7 +2808,7 @@ def colorize_log_entry(self, string): if final_string[0] != " ": final_string = " " + final_string return final_string - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover return string def execute_prebuild_script(self): @@ -3197,9 +2819,7 @@ def execute_prebuild_script(self): (pb_mod_path, pb_func) = self.prebuild_script.rsplit(".", 1) try: # Prefer prebuild script in working directory - if ( - pb_mod_path.count(".") >= 1 - ): # Prebuild script func is nested in a folder + if pb_mod_path.count(".") >= 1: # Prebuild script func is nested in a folder (mod_folder_path, mod_name) = pb_mod_path.rsplit(".", 1) mod_folder_path_fragments = mod_folder_path.split(".") working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments) @@ -3219,9 +2839,7 @@ def execute_prebuild_script(self): click.style("Failed ", fg="red") + "to " + click.style("import prebuild script ", bold=True) - + 'module: "{pb_mod_path}"'.format( - pb_mod_path=click.style(pb_mod_path, bold=True) - ) + + 'module: "{pb_mod_path}"'.format(pb_mod_path=click.style(pb_mod_path, bold=True)) ) if not hasattr(module_, pb_func): # pragma: no cover @@ -3229,9 +2847,7 @@ def 
execute_prebuild_script(self): click.style("Failed ", fg="red") + "to " + click.style("find prebuild script ", bold=True) - + 'function: "{pb_func}" '.format( - pb_func=click.style(pb_func, bold=True) - ) + + 'function: "{pb_func}" '.format(pb_func=click.style(pb_func, bold=True)) + 'in module "{pb_mod_path}"'.format(pb_mod_path=pb_mod_path) ) @@ -3275,13 +2891,9 @@ def deploy_api_gateway(self, api_id): cache_cluster_size=cache_cluster_size, cloudwatch_log_level=self.stage_config.get("cloudwatch_log_level", "OFF"), cloudwatch_data_trace=self.stage_config.get("cloudwatch_data_trace", False), - cloudwatch_metrics_enabled=self.stage_config.get( - "cloudwatch_metrics_enabled", False - ), + cloudwatch_metrics_enabled=self.stage_config.get("cloudwatch_metrics_enabled", False), cache_cluster_ttl=self.stage_config.get("cache_cluster_ttl", 300), - cache_cluster_encrypted=self.stage_config.get( - "cache_cluster_encrypted", False - ), + cache_cluster_encrypted=self.stage_config.get("cache_cluster_encrypted", False), ) return endpoint_url @@ -3328,8 +2940,7 @@ def touch_endpoint(self, endpoint_url): # See: https://github.com/Miserlou/Zappa/pull/1719#issuecomment-471341565 if "PRIVATE" in self.stage_config.get("endpoint_configuration", []): print( - click.style("Warning!", fg="yellow", bold=True) - + " Since you're deploying a private API Gateway endpoint," + click.style("Warning!", fg="yellow", bold=True) + " Since you're deploying a private API Gateway endpoint," " Zappa cannot determine if your function is returning " " a correct status code. You should check your API's response" " manually before considering this deployment complete." @@ -3427,12 +3038,10 @@ def handle(): # pragma: no cover except KeyboardInterrupt: # pragma: no cover cli.on_exit() sys.exit(130) - except Exception as e: + except Exception: cli.on_exit() - click.echo( - "Oh no! An " + click.style("error occurred", fg="red", bold=True) + "! :(" - ) + click.echo("Oh no! An " + click.style("error occurred", fg="red", bold=True) + "! 
:(") click.echo("\n==============\n") import traceback diff --git a/zappa/core.py b/zappa/core.py index 51fa74836..d19b6dfe1 100644 --- a/zappa/core.py +++ b/zappa/core.py @@ -24,6 +24,7 @@ from builtins import bytes, int from distutils.dir_util import copy_tree from io import open +from typing import Optional import boto3 import botocore @@ -280,7 +281,7 @@ def __init__( load_credentials=True, desired_role_name=None, desired_role_arn=None, - runtime="python3.6", # Detected at runtime in CLI + runtime="python3.7", # Detected at runtime in CLI tags=(), endpoint_urls={}, xray_tracing=False, @@ -305,9 +306,7 @@ def __init__( self.runtime = runtime - if self.runtime == "python3.6": - self.manylinux_suffix_start = "cp36m" - elif self.runtime == "python3.7": + if self.runtime == "python3.7": self.manylinux_suffix_start = "cp37m" elif self.runtime == "python3.8": # The 'm' has been dropped in python 3.8+ since builds with and without pymalloc are ABI compatible @@ -319,10 +318,10 @@ def __init__( # AWS Lambda supports manylinux1/2010, manylinux2014, and manylinux_2_24 manylinux_suffixes = ("_2_24", "2014", "2010", "1") self.manylinux_wheel_file_match = re.compile( - f'^.*{self.manylinux_suffix_start}-(manylinux_\d+_\d+_x86_64[.])?manylinux({"|".join(manylinux_suffixes)})_x86_64[.]whl$' + rf'^.*{self.manylinux_suffix_start}-(manylinux_\d+_\d+_x86_64[.])?manylinux({"|".join(manylinux_suffixes)})_x86_64[.]whl$' # noqa: E501 ) self.manylinux_wheel_abi3_file_match = re.compile( - f'^.*cp3.-abi3-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$' + rf'^.*cp3.-abi3-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$' ) self.endpoint_urls = endpoint_urls @@ -372,21 +371,17 @@ def __init__( def configure_boto_session_method_kwargs(self, service, kw): """Allow for custom endpoint urls for non-AWS (testing and bootleg cloud) deployments""" - if service in self.endpoint_urls and not "endpoint_url" in kw: + if service in self.endpoint_urls and "endpoint_url" not in kw: kw["endpoint_url"] = self.endpoint_urls[service] return kw def boto_client(self, service, *args, **kwargs): """A wrapper to apply configuration options to boto clients""" - return self.boto_session.client( - service, *args, **self.configure_boto_session_method_kwargs(service, kwargs) - ) + return self.boto_session.client(service, *args, **self.configure_boto_session_method_kwargs(service, kwargs)) def boto_resource(self, service, *args, **kwargs): """A wrapper to apply configuration options to boto resources""" - return self.boto_session.resource( - service, *args, **self.configure_boto_session_method_kwargs(service, kwargs) - ) + return self.boto_session.resource(service, *args, **self.configure_boto_session_method_kwargs(service, kwargs)) def cache_param(self, value): """Returns a troposphere Ref to a value cached as a parameter.""" @@ -394,9 +389,7 @@ def cache_param(self, value): if value not in self.cf_parameters: keyname = chr(ord("A") + len(self.cf_parameters)) param = self.cf_template.add_parameter( - troposphere.Parameter( - keyname, Type="String", Default=value, tags=self.tags - ) + troposphere.Parameter(keyname, Type="String", Default=value, tags=self.tags) ) self.cf_parameters[value] = param @@ -412,12 +405,7 @@ def copy_editable_packages(self, egg_links, temp_package_path): for egg_link in egg_links: with open(egg_link, "rb") as df: egg_path = df.read().decode("utf-8").splitlines()[0].strip() - pkgs = set( - [ - x.split(".")[0] - for x in find_packages(egg_path, exclude=["test", "tests"]) - ] - ) + pkgs = set([x.split(".")[0] 
for x in find_packages(egg_path, exclude=["test", "tests"])]) for pkg in pkgs: copytree( os.path.join(egg_path, pkg), @@ -447,12 +435,10 @@ def get_deps_list(self, pkg_name, installed_distros=None): if package.project_name.lower() == pkg_name.lower(): deps = [(package.project_name, package.version)] for req in package.requires(): - deps += self.get_deps_list( - pkg_name=req.project_name, installed_distros=installed_distros - ) + deps += self.get_deps_list(pkg_name=req.project_name, installed_distros=installed_distros) return list(set(deps)) # de-dupe before returning - def create_handler_venv(self): + def create_handler_venv(self, use_zappa_release: Optional[str] = None): """ Takes the installed zappa and brings it into a fresh virtualenv-like folder. All dependencies are then downloaded. """ @@ -465,34 +451,34 @@ def create_handler_venv(self): ve_path = os.path.join(os.getcwd(), "handler_venv") if os.sys.platform == "win32": - current_site_packages_dir = os.path.join( - current_venv, "Lib", "site-packages" - ) + current_site_packages_dir = os.path.join(current_venv, "Lib", "site-packages") venv_site_packages_dir = os.path.join(ve_path, "Lib", "site-packages") else: - current_site_packages_dir = os.path.join( - current_venv, "lib", get_venv_from_python_version(), "site-packages" - ) - venv_site_packages_dir = os.path.join( - ve_path, "lib", get_venv_from_python_version(), "site-packages" - ) + current_site_packages_dir = os.path.join(current_venv, "lib", get_venv_from_python_version(), "site-packages") + venv_site_packages_dir = os.path.join(ve_path, "lib", get_venv_from_python_version(), "site-packages") if not os.path.isdir(venv_site_packages_dir): os.makedirs(venv_site_packages_dir) # Copy zappa* to the new virtualenv - zappa_things = [ - z for z in os.listdir(current_site_packages_dir) if z.lower()[:5] == "zappa" - ] + zappa_things = [z for z in os.listdir(current_site_packages_dir) if z.lower()[:5] == "zappa"] for z in zappa_things: copytree( os.path.join(current_site_packages_dir, z), os.path.join(venv_site_packages_dir, z), ) - # Use pip to download zappa's dependencies. Copying from current venv causes issues with things like PyYAML that installs as yaml + # Use pip to download zappa's dependencies. + # Copying from current venv causes issues with things like PyYAML that installs as yaml zappa_deps = self.get_deps_list("zappa") - pkg_list = ["{0!s}=={1!s}".format(dep, version) for dep, version in zappa_deps] + pkg_list = [] + for dep, version in zappa_deps: + # allow specified zappa version for slim_handler_test + if dep == "zappa" and use_zappa_release: + pkg_version_str = f"{dep}=={use_zappa_release}" + else: + pkg_version_str = f"{dep}=={version}" + pkg_list.append(pkg_version_str) # Need to manually add setuptools pkg_list.append("setuptools") @@ -529,17 +515,12 @@ def get_current_venv(): try: subprocess.check_output(["pyenv", "help"], stderr=subprocess.STDOUT) except OSError: - print( - "This directory seems to have pyenv's local venv, " - "but pyenv executable was not found." 
- ) + print("This directory seems to have pyenv's local venv, " "but pyenv executable was not found.") with open(".python-version", "r") as f: # minor fix in how .python-version is read # Related: https://github.com/Miserlou/Zappa/issues/921 env_name = f.readline().strip() - bin_path = subprocess.check_output(["pyenv", "which", "python"]).decode( - "utf-8" - ) + bin_path = subprocess.check_output(["pyenv", "which", "python"]).decode("utf-8") venv = bin_path[: bin_path.rfind(env_name)] + env_name else: # pragma: no cover return None @@ -566,13 +547,11 @@ def create_lambda_zip( """ # Validate archive_format if archive_format not in ["zip", "tarball"]: - raise KeyError( - "The archive format to create a lambda package must be zip or tarball" - ) + raise KeyError("The archive format to create a lambda package must be zip or tarball") # Pip is a weird package. # Calling this function in some environments without this can cause.. funkiness. - import pip + import pip # noqa: 547 if not venv: venv = self.get_current_venv() @@ -600,7 +579,7 @@ def create_lambda_zip( # Make sure that 'concurrent' is always forbidden. # https://github.com/Miserlou/Zappa/issues/827 - if not "concurrent" in exclude: + if "concurrent" not in exclude: exclude.append("concurrent") def splitpath(path): @@ -693,9 +672,7 @@ def splitpath(path): # json.dump(build_info, f) # return True - package_id_file = open( - os.path.join(temp_project_path, "package_info.json"), "w" - ) + package_id_file = open(os.path.join(temp_project_path, "package_info.json"), "w") dumped = json.dumps(package_info, indent=4) try: package_id_file.write(dumped) @@ -709,9 +686,7 @@ def splitpath(path): if os.sys.platform == "win32": site_packages = os.path.join(venv, "Lib", "site-packages") else: - site_packages = os.path.join( - venv, "lib", get_venv_from_python_version(), "site-packages" - ) + site_packages = os.path.join(venv, "lib", get_venv_from_python_version(), "site-packages") egg_links.extend(glob.glob(os.path.join(site_packages, "*.egg-link"))) if minify: @@ -727,9 +702,7 @@ def splitpath(path): copytree(site_packages, temp_package_path, metadata=False, symlinks=False) # We may have 64-bin specific packages too. - site_packages_64 = os.path.join( - venv, "lib64", get_venv_from_python_version(), "site-packages" - ) + site_packages_64 = os.path.join(venv, "lib64", get_venv_from_python_version(), "site-packages") if os.path.exists(site_packages_64): egg_links.extend(glob.glob(os.path.join(site_packages_64, "*.egg-link"))) if minify: @@ -742,9 +715,7 @@ def splitpath(path): ignore=shutil.ignore_patterns(*excludes), ) else: - copytree( - site_packages_64, temp_package_path, metadata=False, symlinks=False - ) + copytree(site_packages_64, temp_package_path, metadata=False, symlinks=False) if egg_links: self.copy_editable_packages(egg_links, temp_package_path) @@ -754,9 +725,7 @@ def splitpath(path): # Then the pre-compiled packages.. 
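# The pre-compiled packages handled below are pulled in as cached manylinux wheels.
# A minimal, standalone sketch (not part of this patch) of the wheel-filename pattern
# compiled earlier in this file as a raw f-string, assuming the python3.8 runtime
# (suffix "cp38"); the wheel names used here are hypothetical examples.
import re

manylinux_suffixes = ("_2_24", "2014", "2010", "1")
wheel_pattern = re.compile(
    rf'^.*cp38-(manylinux_\d+_\d+_x86_64[.])?manylinux({"|".join(manylinux_suffixes)})_x86_64[.]whl$'
)
# A manylinux2014 wheel (with the newer PEP 600 tag prepended) is accepted..
assert wheel_pattern.match("psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl")
# ..while a non-Linux wheel is rejected and handled by the other packaging paths.
assert not wheel_pattern.match("psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl")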
if use_precompiled_packages: print("Downloading and installing dependencies..") - installed_packages = self.get_installed_packages( - site_packages, site_packages_64 - ) + installed_packages = self.get_installed_packages(site_packages, site_packages_64) try: for ( @@ -838,21 +807,13 @@ def splitpath(path): if archive_format == "zip": # Actually put the file into the proper place in the zip # Related: https://github.com/Miserlou/Zappa/pull/716 - zipi = zipfile.ZipInfo( - os.path.join( - root.replace(temp_project_path, "").lstrip(os.sep), filename - ) - ) + zipi = zipfile.ZipInfo(os.path.join(root.replace(temp_project_path, "").lstrip(os.sep), filename)) zipi.create_system = 3 zipi.external_attr = 0o755 << int(16) # Is this P2/P3 functional? with open(os.path.join(root, filename), "rb") as f: archivef.writestr(zipi, f.read(), compression_method) elif archive_format == "tarball": - tarinfo = tarfile.TarInfo( - os.path.join( - root.replace(temp_project_path, "").lstrip(os.sep), filename - ) - ) + tarinfo = tarfile.TarInfo(os.path.join(root.replace(temp_project_path, "").lstrip(os.sep), filename)) tarinfo.mode = 0o755 stat = os.stat(os.path.join(root, filename)) @@ -868,19 +829,14 @@ def splitpath(path): # if the directory does not contain any .py file at any level, we can skip the rest dirs[:] = [d for d in dirs if d != root] else: - if ( - "__init__.py" not in files - and not conflicts_with_a_neighbouring_module(root) - ): + if "__init__.py" not in files and not conflicts_with_a_neighbouring_module(root): tmp_init = os.path.join(temp_project_path, "__init__.py") open(tmp_init, "a").close() os.chmod(tmp_init, 0o755) arcname = os.path.join( root.replace(temp_project_path, ""), - os.path.join( - root.replace(temp_project_path, ""), "__init__.py" - ), + os.path.join(root.replace(temp_project_path, ""), "__init__.py"), ) if archive_format == "zip": archivef.write(tmp_init, arcname) @@ -918,8 +874,7 @@ def get_installed_packages(site_packages, site_packages_64): package.project_name.lower(): package.version for package in pkg_resources.WorkingSet() if package.project_name.lower() in package_to_keep - or package.location.lower() - in [site_packages.lower(), site_packages_64.lower()] + or package.location.lower() in [site_packages.lower(), site_packages_64.lower()] } return installed_packages @@ -930,9 +885,7 @@ def download_url_with_progress(url, stream, disable_progress): Downloads a given url in chunks and writes to the provided stream (can be any io stream). Displays the progress bar for the download. """ - resp = requests.get( - url, timeout=float(os.environ.get("PIP_TIMEOUT", 2)), stream=True - ) + resp = requests.get(url, timeout=float(os.environ.get("PIP_TIMEOUT", 2)), stream=True) resp.raw.decode_content = True progress = tqdm( @@ -948,9 +901,7 @@ def download_url_with_progress(url, stream, disable_progress): progress.close() - def get_cached_manylinux_wheel( - self, package_name, package_version, disable_progress=False - ): + def get_cached_manylinux_wheel(self, package_name, package_version, disable_progress=False): """ Gets the locally stored version of a manylinux wheel. If one does not exist, the function downloads it. 
""" @@ -960,7 +911,7 @@ def get_cached_manylinux_wheel( os.makedirs(cached_wheels_dir) else: # Check if we already have a cached copy - wheel_name = re.sub("[^\w\d.]+", "_", package_name, re.UNICODE) + wheel_name = re.sub(r"[^\w\d.]+", "_", package_name, re.UNICODE) wheel_file = f"{wheel_name}-{package_version}-*_x86_64.whl" wheel_path = os.path.join(cached_wheels_dir, wheel_file) @@ -968,15 +919,11 @@ def get_cached_manylinux_wheel( if re.match(self.manylinux_wheel_file_match, pathname) or re.match( self.manylinux_wheel_abi3_file_match, pathname ): - print( - f" - {package_name}=={package_version}: Using locally cached manylinux wheel" - ) + print(f" - {package_name}=={package_version}: Using locally cached manylinux wheel") return pathname # The file is not cached, download it. - wheel_url, filename = self.get_manylinux_wheel_url( - package_name, package_version - ) + wheel_url, filename = self.get_manylinux_wheel_url(package_name, package_version) if not wheel_url: return None @@ -1017,17 +964,16 @@ def get_manylinux_wheel_url(self, package_name, package_version): else: url = "https://pypi.python.org/pypi/{}/json".format(package_name) try: - res = requests.get( - url, timeout=float(os.environ.get("PIP_TIMEOUT", 1.5)) - ) + res = requests.get(url, timeout=float(os.environ.get("PIP_TIMEOUT", 1.5))) data = res.json() - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover return None, None with open(json_file_path, "wb") as metafile: jsondata = json.dumps(data) metafile.write(bytes(jsondata, "utf-8")) - if package_version not in data["releases"]: + if package_version not in data.get("releases", []): + logger.warning(f"package_version({package_version}) not found in {package_name} metafile={json_file_path}") return None, None for f in data["releases"][package_version]: @@ -1044,7 +990,8 @@ def get_manylinux_wheel_url(self, package_name, package_version): def upload_to_s3(self, source_path, bucket_name, disable_progress=False): r""" Given a file, upload it to S3. - Credentials should be stored in environment variables or ~/.aws/credentials (%USERPROFILE%\.aws\credentials on Windows). + Credentials should be stored in environment variables or + ~/.aws/credentials (%USERPROFILE%\.aws\credentials on Windows). Returns True on success, false on failure. """ try: @@ -1064,12 +1011,7 @@ def upload_to_s3(self, source_path, bucket_name, disable_progress=False): ) if self.tags: - tags = { - "TagSet": [ - {"Key": key, "Value": self.tags[key]} - for key in self.tags.keys() - ] - } + tags = {"TagSet": [{"Key": key, "Value": self.tags[key]} for key in self.tags.keys()]} self.s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tags) if not os.path.isfile(source_path) or os.stat(source_path).st_size == 0: @@ -1092,10 +1034,8 @@ def upload_to_s3(self, source_path, bucket_name, disable_progress=False): # which cannot use the progress bar. 
# Related: https://github.com/boto/boto3/issues/611 try: - self.s3_client.upload_file( - source_path, bucket_name, dest_path, Callback=progress.update - ) - except Exception as e: # pragma: no cover + self.s3_client.upload_file(source_path, bucket_name, dest_path, Callback=progress.update) + except Exception: # pragma: no cover self.s3_client.upload_file(source_path, bucket_name, dest_path) progress.close() @@ -1121,9 +1061,7 @@ def copy_on_s3(self, src_file_name, dst_file_name, bucket_name): copy_src = {"Bucket": bucket_name, "Key": src_file_name} try: - self.s3_client.copy( - CopySource=copy_src, Bucket=bucket_name, Key=dst_file_name - ) + self.s3_client.copy(CopySource=copy_src, Bucket=bucket_name, Key=dst_file_name) return True except botocore.exceptions.ClientError: # pragma: no cover return False @@ -1168,7 +1106,7 @@ def create_lambda_function( publish=True, vpc_config=None, dead_letter_config=None, - runtime="python3.6", + runtime="python3.7", aws_environment_variables=None, aws_kms_key_arn=None, xray_tracing=False, @@ -1179,7 +1117,8 @@ def create_lambda_function( docker_image_uri=None, ): """ - Given a bucket and key (or a local path) of a valid Lambda-zip, a function name and a handler, register that Lambda function. + Given a bucket and key (or a local path) of a valid Lambda-zip, + a function name and a handler, register that Lambda function. """ if not vpc_config: vpc_config = {} @@ -1267,7 +1206,8 @@ def update_lambda_function( docker_image_uri=None, ): """ - Given a bucket and key (or a local path) of a valid Lambda-zip, a function name and a handler, update that Lambda function's code. + Given a bucket and key (or a local path) of a valid Lambda-zip, + a function name and a handler, update that Lambda function's code. Optionally, delete previous versions if they exceed the optional limit. """ print("Updating Lambda function code..") @@ -1321,9 +1261,7 @@ def update_lambda_function( # Find the existing revision IDs for the given function # Related: https://github.com/Miserlou/Zappa/issues/1402 versions_in_lambda = [] - versions = self.lambda_client.list_versions_by_function( - FunctionName=function_name - ) + versions = self.lambda_client.list_versions_by_function(FunctionName=function_name) for version in versions["Versions"]: versions_in_lambda.append(version["Version"]) while "NextMarker" in versions: @@ -1335,9 +1273,7 @@ def update_lambda_function( versions_in_lambda.remove("$LATEST") # Delete older revisions if their number exceeds the specified limit for version in versions_in_lambda[::-1][num_revisions:]: - self.lambda_client.delete_function( - FunctionName=function_name, Qualifier=version - ) + self.lambda_client.delete_function(FunctionName=function_name, Qualifier=version) self.wait_until_lambda_function_is_updated(function_name) @@ -1353,7 +1289,7 @@ def update_lambda_configuration( memory_size=512, publish=True, vpc_config=None, - runtime="python3.6", + runtime="python3.7", aws_environment_variables=None, aws_kms_key_arn=None, layers=None, @@ -1381,13 +1317,9 @@ def update_lambda_configuration( # Check if there are any remote aws lambda env vars so they don't get trashed. 
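# A minimal, standalone sketch (not part of this patch) of the merge performed just
# below: environment variables that exist only on the deployed Lambda are preserved,
# while keys defined in the settings file win; the sample values are hypothetical.
remote_vars = {"SHARED": "remote-value", "REMOTE_ONLY": "keep-me"}
aws_environment_variables = {"SHARED": "from-settings"}
for key, value in remote_vars.items():
    if key not in aws_environment_variables:
        aws_environment_variables[key] = value
assert aws_environment_variables == {"SHARED": "from-settings", "REMOTE_ONLY": "keep-me"}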
# https://github.com/Miserlou/Zappa/issues/987, Related: https://github.com/Miserlou/Zappa/issues/765 - lambda_aws_config = self.lambda_client.get_function_configuration( - FunctionName=function_name - ) + lambda_aws_config = self.lambda_client.get_function_configuration(FunctionName=function_name) if "Environment" in lambda_aws_config: - lambda_aws_environment_variables = lambda_aws_config["Environment"].get( - "Variables", {} - ) + lambda_aws_environment_variables = lambda_aws_config["Environment"].get("Variables", {}) # Append keys that are remote but not in settings file for key, value in lambda_aws_environment_variables.items(): if key not in aws_environment_variables: @@ -1443,51 +1375,32 @@ def invoke_lambda_function( Payload=payload, ) - def rollback_lambda_function_version( - self, function_name, versions_back=1, publish=True - ): + def rollback_lambda_function_version(self, function_name, versions_back=1, publish=True): """ Rollback the lambda function code 'versions_back' number of revisions. Returns the Function ARN. """ - response = self.lambda_client.list_versions_by_function( - FunctionName=function_name - ) + response = self.lambda_client.list_versions_by_function(FunctionName=function_name) # https://github.com/Miserlou/Zappa/pull/2192 - if ( - len(response.get("Versions", [])) > 1 - and response["Versions"][-1]["PackageType"] == "Image" - ): - raise NotImplementedError( - "Zappa's rollback functionality is not available for Docker based deployments" - ) + if len(response.get("Versions", [])) > 1 and response["Versions"][-1]["PackageType"] == "Image": + raise NotImplementedError("Zappa's rollback functionality is not available for Docker based deployments") # Take into account $LATEST if len(response["Versions"]) < versions_back + 1: print("We do not have {} revisions. Aborting".format(str(versions_back))) return False - revisions = [ - int(revision["Version"]) - for revision in response["Versions"] - if revision["Version"] != "$LATEST" - ] + revisions = [int(revision["Version"]) for revision in response["Versions"] if revision["Version"] != "$LATEST"] revisions.sort(reverse=True) response = self.lambda_client.get_function( - FunctionName="function:{}:{}".format( - function_name, revisions[versions_back] - ) + FunctionName="function:{}:{}".format(function_name, revisions[versions_back]) ) response = requests.get(response["Code"]["Location"]) if response.status_code != 200: - print( - "Failed to get version {} of {} code".format( - versions_back, function_name - ) - ) + print("Failed to get version {} of {} code".format(versions_back, function_name)) return False response = self.lambda_client.update_function_code( @@ -1527,9 +1440,7 @@ def get_lambda_function_versions(self, function_name): Simply returns the versions available for a Lambda function, given a function name. """ try: - response = self.lambda_client.list_versions_by_function( - FunctionName=function_name - ) + response = self.lambda_client.list_versions_by_function(FunctionName=function_name) return response.get("Versions", []) except Exception: return [] @@ -1554,19 +1465,13 @@ def deploy_lambda_alb(self, lambda_arn, lambda_name, alb_vpc_config, timeout): The `zappa deploy` functionality for ALB infrastructure. """ if not alb_vpc_config: - raise EnvironmentError( - "When creating an ALB, alb_vpc_config must be filled out in zappa_settings." 
- ) + raise EnvironmentError("When creating an ALB, alb_vpc_config must be filled out in zappa_settings.") if "SubnetIds" not in alb_vpc_config: - raise EnvironmentError( - "When creating an ALB, you must supply two subnets in different availability zones." - ) + raise EnvironmentError("When creating an ALB, you must supply two subnets in different availability zones.") if "SecurityGroupIds" not in alb_vpc_config: alb_vpc_config["SecurityGroupIds"] = [] if not alb_vpc_config.get("CertificateArn"): - raise EnvironmentError( - "When creating an ALB, you must supply a CertificateArn for the HTTPS listener." - ) + raise EnvironmentError("When creating an ALB, you must supply a CertificateArn for the HTTPS listener.") # Related: https://github.com/Miserlou/Zappa/issues/1856 if "Scheme" not in alb_vpc_config: @@ -1601,13 +1506,9 @@ def deploy_lambda_alb(self, lambda_arn, lambda_name, alb_vpc_config, timeout): ) load_balancer_arn = response["LoadBalancers"][0]["LoadBalancerArn"] load_balancer_dns = response["LoadBalancers"][0]["DNSName"] - load_balancer_vpc = response["LoadBalancers"][0]["VpcId"] + # load_balancer_vpc = response["LoadBalancers"][0]["VpcId"] waiter = self.elbv2_client.get_waiter("load_balancer_available") - print( - "Waiting for load balancer [{}] to become active..".format( - load_balancer_arn - ) - ) + print("Waiting for load balancer [{}] to become active..".format(load_balancer_arn)) waiter.wait(LoadBalancerArns=[load_balancer_arn], WaiterConfig={"Delay": 3}) # Match the lambda timeout on the load balancer. @@ -1627,9 +1528,8 @@ def deploy_lambda_alb(self, lambda_arn, lambda_name, alb_vpc_config, timeout): response = self.elbv2_client.create_target_group(**kwargs) if not (response["TargetGroups"]) or len(response["TargetGroups"]) != 1: raise EnvironmentError( - "Failure to create application load balancer target group. Response was in unexpected format. Response was: {}".format( - repr(response) - ) + "Failure to create application load balancer target group. " + "Response was in unexpected format. Response was: {}".format(repr(response)) ) target_group_arn = response["TargetGroups"][0]["TargetGroupArn"] @@ -1690,9 +1590,7 @@ def undeploy_lambda_alb(self, lambda_name): # Locate and delete alb/lambda permissions try: # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda.html#Lambda.Client.remove_permission - self.lambda_client.remove_permission( - FunctionName=lambda_name, StatementId=lambda_name - ) + self.lambda_client.remove_permission(FunctionName=lambda_name, StatementId=lambda_name) except botocore.exceptions.ClientError as e: # pragma: no cover if "ResourceNotFoundException" in e.response["Error"]["Code"]: pass @@ -1701,19 +1599,15 @@ def undeploy_lambda_alb(self, lambda_name): # Locate and delete load balancer try: - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_load_balancers + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_load_balancers # noqa: E501 response = self.elbv2_client.describe_load_balancers(Names=[lambda_name]) if not (response["LoadBalancers"]) or len(response["LoadBalancers"]) > 1: raise EnvironmentError( - "Failure to locate/delete ALB named [{}]. Response was: {}".format( - lambda_name, repr(response) - ) + "Failure to locate/delete ALB named [{}]. 
Response was: {}".format(lambda_name, repr(response)) ) load_balancer_arn = response["LoadBalancers"][0]["LoadBalancerArn"] - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_listeners - response = self.elbv2_client.describe_listeners( - LoadBalancerArn=load_balancer_arn - ) + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_listeners # noqa: E501 + response = self.elbv2_client.describe_listeners(LoadBalancerArn=load_balancer_arn) if not (response["Listeners"]): print("No listeners found.") elif len(response["Listeners"]) > 1: @@ -1724,14 +1618,13 @@ def undeploy_lambda_alb(self, lambda_name): ) else: listener_arn = response["Listeners"][0]["ListenerArn"] - # Remove the listener. This explicit deletion of the listener seems necessary to avoid ResourceInUseExceptions when deleting target groups. - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_listener + # Remove the listener. + # This explicit deletion of the listener seems necessary to avoid ResourceInUseExceptions when deleting target groups. # noqa: E501# noqa: E501 + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_listener # noqa: E501 response = self.elbv2_client.delete_listener(ListenerArn=listener_arn) # Remove the load balancer and wait for completion - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_load_balancer - response = self.elbv2_client.delete_load_balancer( - LoadBalancerArn=load_balancer_arn - ) + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_load_balancer # noqa: E501 + response = self.elbv2_client.delete_load_balancer(LoadBalancerArn=load_balancer_arn) waiter = self.elbv2_client.get_waiter("load_balancers_deleted") print("Waiting for load balancer [{}] to be deleted..".format(lambda_name)) waiter.wait(LoadBalancerArns=[load_balancer_arn], WaiterConfig={"Delay": 3}) @@ -1759,9 +1652,7 @@ def undeploy_lambda_alb(self, lambda_name): ) target_group_arn = response["TargetGroups"][0]["TargetGroupArn"] # Deregister targets and wait for completion - self.elbv2_client.deregister_targets( - TargetGroupArn=target_group_arn, Targets=[{"Id": lambda_arn}] - ) + self.elbv2_client.deregister_targets(TargetGroupArn=target_group_arn, Targets=[{"Id": lambda_arn}]) waiter = self.elbv2_client.get_waiter("target_deregistered") print("Waiting for target [{}] to be deregistered...".format(lambda_name)) waiter.wait( @@ -1804,9 +1695,7 @@ def create_api_gateway_routes( if not description: description = "Created automatically by Zappa." 
restapi.Description = description - endpoint_configuration = ( - [] if endpoint_configuration is None else endpoint_configuration - ) + endpoint_configuration = [] if endpoint_configuration is None else endpoint_configuration if self.boto_session.region_name == "us-gov-west-1": endpoint_configuration.append("REGIONAL") if endpoint_configuration: @@ -1818,9 +1707,7 @@ def create_api_gateway_routes( self.cf_template.add_resource(restapi) root_id = troposphere.GetAtt(restapi, "RootResourceId") - invocation_prefix = ( - "aws" if self.boto_session.region_name != "us-gov-west-1" else "aws-us-gov" - ) + invocation_prefix = "aws" if self.boto_session.region_name != "us-gov-west-1" else "aws-us-gov" invocations_uri = ( "arn:" + invocation_prefix @@ -1837,15 +1724,13 @@ def create_api_gateway_routes( authorizer_resource = None if authorizer: authorizer_lambda_arn = authorizer.get("arn", lambda_arn) - lambda_uri = "arn:{invocation_prefix}:apigateway:{region_name}:lambda:path/2015-03-31/functions/{lambda_arn}/invocations".format( - invocation_prefix=invocation_prefix, - region_name=self.boto_session.region_name, - lambda_arn=authorizer_lambda_arn, - ) - authorizer_resource = self.create_authorizer( - restapi, lambda_uri, authorizer + lambda_uri = ( + f"arn:{invocation_prefix}:apigateway:{self.boto_session.region_name}:" + f"lambda:path/2015-03-31/functions/{authorizer_lambda_arn}/invocations" ) + authorizer_resource = self.create_authorizer(restapi, lambda_uri, authorizer) + self.create_and_setup_methods( restapi, root_id, @@ -1857,9 +1742,7 @@ def create_api_gateway_routes( ) if cors_options: - self.create_and_setup_cors( - restapi, root_id, invocations_uri, 0, cors_options - ) + self.create_and_setup_cors(restapi, root_id, invocations_uri, 0, cors_options) resource = troposphere.apigateway.Resource("ResourceAnyPathSlashed") self.cf_api_resources.append(resource.title) @@ -1879,9 +1762,7 @@ def create_api_gateway_routes( ) # pragma: no cover if cors_options: - self.create_and_setup_cors( - restapi, resource, invocations_uri, 1, cors_options - ) # pragma: no cover + self.create_and_setup_cors(restapi, resource, invocations_uri, 1, cors_options) # pragma: no cover return restapi def create_authorizer(self, restapi, uri, authorizer): @@ -1896,20 +1777,14 @@ def create_authorizer(self, restapi, uri, authorizer): authorizer_resource.Name = authorizer.get("name", "ZappaAuthorizer") authorizer_resource.Type = authorizer_type authorizer_resource.AuthorizerUri = uri - authorizer_resource.IdentitySource = ( - "method.request.header.%s" % authorizer.get("token_header", "Authorization") - ) + authorizer_resource.IdentitySource = "method.request.header.%s" % authorizer.get("token_header", "Authorization") if identity_validation_expression: - authorizer_resource.IdentityValidationExpression = ( - identity_validation_expression - ) + authorizer_resource.IdentityValidationExpression = identity_validation_expression if authorizer_type == "TOKEN": if not self.credentials_arn: self.get_credentials_arn() - authorizer_resource.AuthorizerResultTtlInSeconds = authorizer.get( - "result_ttl", 300 - ) + authorizer_resource.AuthorizerResultTtlInSeconds = authorizer.get("result_ttl", 300) authorizer_resource.AuthorizerCredentials = self.credentials_arn if authorizer_type == "COGNITO_USER_POOLS": authorizer_resource.ProviderARNs = authorizer.get("provider_arns") @@ -2003,9 +1878,7 @@ def create_and_setup_cors(self, restapi, resource, uri, depth, config): ), "Access-Control-Allow-Origin": "'%s'" % config.get("allowed_origin", 
"*"), } - method_response.ResponseParameters = { - "method.response.header.%s" % key: True for key in response_headers - } + method_response.ResponseParameters = {"method.response.header.%s" % key: True for key in response_headers} method_response.StatusCode = "200" method.MethodResponses = [method_response] self.cf_template.add_resource(method) @@ -2017,8 +1890,7 @@ def create_and_setup_cors(self, restapi, resource, uri, depth, config): integration.RequestTemplates = {"application/json": '{"statusCode": 200}'} integration_response = troposphere.apigateway.IntegrationResponse() integration_response.ResponseParameters = { - "method.response.header.%s" % key: value - for key, value in response_headers.items() + "method.response.header.%s" % key: value for key, value in response_headers.items() } integration_response.ResponseTemplates = {"application/json": ""} integration_response.StatusCode = "200" @@ -2073,19 +1945,14 @@ def deploy_api_gateway( ], ) - return "https://{}.execute-api.{}.amazonaws.com/{}".format( - api_id, self.boto_session.region_name, stage_name - ) + return "https://{}.execute-api.{}.amazonaws.com/{}".format(api_id, self.boto_session.region_name, stage_name) def add_binary_support(self, api_id, cors=False): """ Add binary support """ response = self.apigateway_client.get_rest_api(restApiId=api_id) - if ( - "binaryMediaTypes" not in response - or "*/*" not in response["binaryMediaTypes"] - ): + if "binaryMediaTypes" not in response or "*/*" not in response["binaryMediaTypes"]: self.apigateway_client.update_rest_api( restApiId=api_id, patchOperations=[{"op": "add", "path": "/binaryMediaTypes/*~1*"}], @@ -2095,11 +1962,7 @@ def add_binary_support(self, api_id, cors=False): # fix for issue 699 and 1035, cors+binary support don't work together # go through each resource and update the contentHandling type response = self.apigateway_client.get_resources(restApiId=api_id) - resource_ids = [ - item["id"] - for item in response["items"] - if "OPTIONS" in item.get("resourceMethods", {}) - ] + resource_ids = [item["id"] for item in response["items"] if "OPTIONS" in item.get("resourceMethods", {})] for resource_id in resource_ids: self.apigateway_client.update_integration( @@ -2128,20 +1991,14 @@ def remove_binary_support(self, api_id, cors=False): if cors: # go through each resource and change the contentHandling type response = self.apigateway_client.get_resources(restApiId=api_id) - resource_ids = [ - item["id"] - for item in response["items"] - if "OPTIONS" in item.get("resourceMethods", {}) - ] + resource_ids = [item["id"] for item in response["items"] if "OPTIONS" in item.get("resourceMethods", {})] for resource_id in resource_ids: self.apigateway_client.update_integration( restApiId=api_id, resourceId=resource_id, httpMethod="OPTIONS", - patchOperations=[ - {"op": "replace", "path": "/contentHandling", "value": ""} - ], + patchOperations=[{"op": "replace", "path": "/contentHandling", "value": ""}], ) def add_api_compression(self, api_id, min_compression_size): @@ -2204,9 +2061,7 @@ def remove_api_key(self, api_id, stage_name): """ Remove a generated API key for api_id and stage_name """ - response = self.apigateway_client.get_api_keys( - limit=1, nameQuery="{}_{}".format(stage_name, api_id) - ) + response = self.apigateway_client.get_api_keys(limit=1, nameQuery="{}_{}".format(stage_name, api_id)) for api_key in response.get("items"): self.apigateway_client.delete_api_key(apiKey="{}".format(api_key["id"])) @@ -2251,8 +2106,6 @@ def undeploy_api_gateway(self, lambda_name, 
domain_name=None, base_path=None): """ print("Deleting API Gateway..") - api_id = self.get_api_id(lambda_name) - if domain_name: # XXX - Remove Route53 smartly here? @@ -2263,7 +2116,7 @@ def undeploy_api_gateway(self, lambda_name, domain_name=None, base_path=None): domainName=domain_name, basePath="(none)" if base_path is None else base_path, ) - except Exception as e: + except Exception: # We may not have actually set up the domain. pass @@ -2331,22 +2184,13 @@ def update_cognito(self, lambda_name, user_pool, lambda_configs, lambda_arn): description_kwargs[key] = value if "LambdaConfig" not in description_kwargs: description_kwargs["LambdaConfig"] = LambdaConfig - if ( - "TemporaryPasswordValidityDays" - in description_kwargs["Policies"]["PasswordPolicy"] - ): - description_kwargs["AdminCreateUserConfig"].pop( - "UnusedAccountValidityDays", None - ) + if "TemporaryPasswordValidityDays" in description_kwargs["Policies"]["PasswordPolicy"]: + description_kwargs["AdminCreateUserConfig"].pop("UnusedAccountValidityDays", None) if "UnusedAccountValidityDays" in description_kwargs["AdminCreateUserConfig"]: - description_kwargs["Policies"]["PasswordPolicy"][ - "TemporaryPasswordValidityDays" - ] = description_kwargs["AdminCreateUserConfig"].pop( - "UnusedAccountValidityDays", None - ) - result = self.cognito_client.update_user_pool( - UserPoolId=user_pool, **description_kwargs - ) + description_kwargs["Policies"]["PasswordPolicy"]["TemporaryPasswordValidityDays"] = description_kwargs[ + "AdminCreateUserConfig" + ].pop("UnusedAccountValidityDays", None) + result = self.cognito_client.update_user_pool(UserPoolId=user_pool, **description_kwargs) if result["ResponseMetadata"]["HTTPStatusCode"] != 200: print("Cognito: Failed to update user pool", result) @@ -2369,7 +2213,7 @@ def delete_stack(self, name, wait=False): """ try: stack = self.cf_client.describe_stacks(StackName=name)["Stacks"][0] - except: # pragma: no cover + except Exception: # pragma: no cover print("No Zappa stack named {0}".format(name)) return False @@ -2420,7 +2264,7 @@ def create_stack_template( self.cf_api_resources = [] self.cf_parameters = {} - restapi = self.create_api_gateway_routes( + self.create_api_gateway_routes( lambda_arn, api_name=lambda_name, api_key_required=api_key_required, @@ -2456,17 +2300,11 @@ def update_stack( self.upload_to_s3(template, working_bucket, disable_progress=disable_progress) if self.boto_session.region_name == "us-gov-west-1": - url = "https://s3-us-gov-west-1.amazonaws.com/{0}/{1}".format( - working_bucket, template - ) + url = "https://s3-us-gov-west-1.amazonaws.com/{0}/{1}".format(working_bucket, template) else: url = "https://s3.amazonaws.com/{0}/{1}".format(working_bucket, template) - tags = [ - {"Key": key, "Value": self.tags[key]} - for key in self.tags.keys() - if key != "ZappaProject" - ] + tags = [{"Key": key, "Value": self.tags[key]} for key in self.tags.keys() if key != "ZappaProject"] tags.append({"Key": "ZappaProject", "Value": name}) update = True @@ -2480,12 +2318,8 @@ def update_stack( return if not update: - self.cf_client.create_stack( - StackName=name, Capabilities=capabilities, TemplateURL=url, Tags=tags - ) - print( - "Waiting for stack {0} to create (this can take a bit)..".format(name) - ) + self.cf_client.create_stack(StackName=name, Capabilities=capabilities, TemplateURL=url, Tags=tags) + print("Waiting for stack {0} to create (this can take a bit)..".format(name)) else: try: self.cf_client.update_stack( @@ -2535,11 +2369,7 @@ def update_stack( count = 0 for result in 
sr.paginate(StackName=name): - done = ( - 1 - for x in result["StackResourceSummaries"] - if "COMPLETE" in x["ResourceStatus"] - ) + done = (1 for x in result["StackResourceSummaries"] if "COMPLETE" in x["ResourceStatus"]) count += sum(done) if count: # We can end up in a situation where we have more resources being created @@ -2573,9 +2403,7 @@ def get_api_url(self, lambda_name, stage_name): """ api_id = self.get_api_id(lambda_name) if api_id: - return "https://{}.execute-api.{}.amazonaws.com/{}".format( - api_id, self.boto_session.region_name, stage_name - ) + return "https://{}.execute-api.{}.amazonaws.com/{}".format(api_id, self.boto_session.region_name, stage_name) else: return None @@ -2584,11 +2412,9 @@ def get_api_id(self, lambda_name): Given a lambda_name, return the API id. """ try: - response = self.cf_client.describe_stack_resource( - StackName=lambda_name, LogicalResourceId="Api" - ) + response = self.cf_client.describe_stack_resource(StackName=lambda_name, LogicalResourceId="Api") return response["StackResourceDetail"].get("PhysicalResourceId", None) - except: # pragma: no cover + except Exception: # pragma: no cover try: # Try the old method (project was probably made on an older, non CF version) response = self.apigateway_client.get_rest_apis(limit=500) @@ -2599,7 +2425,7 @@ def get_api_id(self, lambda_name): logger.exception("Could not get API ID.") return None - except: # pragma: no cover + except Exception: # pragma: no cover # We don't even have an API deployed. That's okay! return None @@ -2655,10 +2481,7 @@ def update_route53_records(self, domain_name, dns_name): """ zone_id = self.get_hosted_zone_id_for_domain(domain_name) - is_apex = ( - self.route53.get_hosted_zone(Id=zone_id)["HostedZone"]["Name"][:-1] - == domain_name - ) + is_apex = self.route53.get_hosted_zone(Id=zone_id)["HostedZone"]["Name"][:-1] == domain_name if is_apex: record_set = { "Name": domain_name, @@ -2687,9 +2510,7 @@ def update_route53_records(self, domain_name, dns_name): # but the alias target name does not lie within the target zone response = self.route53.change_resource_record_sets( HostedZoneId=zone_id, - ChangeBatch={ - "Changes": [{"Action": "UPSERT", "ResourceRecordSet": record_set}] - }, + ChangeBatch={"Changes": [{"Action": "UPSERT", "ResourceRecordSet": record_set}]}, ) return response @@ -2724,16 +2545,7 @@ def update_domain_name( print("Updating domain name!") certificate_name = certificate_name + str(time.time()) - - api_gateway_domain = self.apigateway_client.get_domain_name( - domainName=domain_name - ) - if ( - not certificate_arn - and certificate_body - and certificate_private_key - and certificate_chain - ): + if not certificate_arn and certificate_body and certificate_private_key and certificate_chain: acm_certificate = self.acm_client.import_certificate( Certificate=certificate_body, PrivateKey=certificate_private_key, @@ -2755,9 +2567,7 @@ def update_domain_name( ], ) - def update_domain_base_path_mapping( - self, domain_name, lambda_name, stage, base_path - ): + def update_domain_base_path_mapping(self, domain_name, lambda_name, stage, base_path): """ Update domain base path mapping on API Gateway if it was changed """ @@ -2765,15 +2575,10 @@ def update_domain_base_path_mapping( if not api_id: print("Warning! 
Can't update base path mapping!") return - base_path_mappings = self.apigateway_client.get_base_path_mappings( - domainName=domain_name - ) + base_path_mappings = self.apigateway_client.get_base_path_mappings(domainName=domain_name) found = False for base_path_mapping in base_path_mappings.get("items", []): - if ( - base_path_mapping["restApiId"] == api_id - and base_path_mapping["stage"] == stage - ): + if base_path_mapping["restApiId"] == api_id and base_path_mapping["stage"] == stage: found = True if base_path_mapping["basePath"] != base_path: self.apigateway_client.update_base_path_mapping( @@ -2802,9 +2607,7 @@ def get_all_zones(self): new_zones = self.route53.list_hosted_zones(MaxItems="100") while new_zones["IsTruncated"]: zones["HostedZones"] += new_zones["HostedZones"] - new_zones = self.route53.list_hosted_zones( - Marker=new_zones["NextMarker"], MaxItems="100" - ) + new_zones = self.route53.list_hosted_zones(Marker=new_zones["NextMarker"], MaxItems="100") zones["HostedZones"] += new_zones["HostedZones"] return zones @@ -2826,17 +2629,12 @@ def get_domain_name(self, domain_name, route53=True): try: zones = self.get_all_zones() for zone in zones["HostedZones"]: - records = self.route53.list_resource_record_sets( - HostedZoneId=zone["Id"] - ) + records = self.route53.list_resource_record_sets(HostedZoneId=zone["Id"]) for record in records["ResourceRecordSets"]: - if ( - record["Type"] in ("CNAME", "A") - and record["Name"][:-1] == domain_name - ): + if record["Type"] in ("CNAME", "A") and record["Name"][:-1] == domain_name: return record - except Exception as e: + except Exception: return None ## @@ -2889,9 +2687,7 @@ def create_iam_roles(self): except botocore.client.ClientError: print("Creating " + self.role_name + " IAM Role..") - role = self.iam.create_role( - RoleName=self.role_name, AssumeRolePolicyDocument=self.assume_policy - ) + role = self.iam.create_role(RoleName=self.role_name, AssumeRolePolicyDocument=self.assume_policy) self.credentials_arn = role.arn updated = True @@ -2899,19 +2695,13 @@ def create_iam_roles(self): policy = self.iam.RolePolicy(self.role_name, "zappa-permissions") try: if policy.policy_document != attach_policy_obj: - print( - "Updating zappa-permissions policy on " - + self.role_name - + " IAM Role." - ) + print("Updating zappa-permissions policy on " + self.role_name + " IAM Role.") policy.put(PolicyDocument=self.attach_policy) updated = True except botocore.client.ClientError: - print( - "Creating zappa-permissions policy on " + self.role_name + " IAM Role." 
- ) + print("Creating zappa-permissions policy on " + self.role_name + " IAM Role.") policy.put(PolicyDocument=self.attach_policy) updated = True @@ -2919,9 +2709,7 @@ def create_iam_roles(self): role.assume_role_policy_document["Statement"][0]["Principal"]["Service"] ) != set(assume_policy_obj["Statement"][0]["Principal"]["Service"]): print("Updating assume role policy on " + self.role_name + " IAM Role.") - self.iam_client.update_assume_role_policy( - RoleName=self.role_name, PolicyDocument=self.assume_policy - ) + self.iam_client.update_assume_role_policy(RoleName=self.role_name, PolicyDocument=self.assume_policy) updated = True return self.credentials_arn, updated @@ -2935,19 +2723,11 @@ def _clear_policy(self, lambda_name): if policy_response["ResponseMetadata"]["HTTPStatusCode"] == 200: statement = json.loads(policy_response["Policy"])["Statement"] for s in statement: - delete_response = self.lambda_client.remove_permission( - FunctionName=lambda_name, StatementId=s["Sid"] - ) + delete_response = self.lambda_client.remove_permission(FunctionName=lambda_name, StatementId=s["Sid"]) if delete_response["ResponseMetadata"]["HTTPStatusCode"] != 204: - logger.error( - "Failed to delete an obsolete policy statement: {}".format( - policy_response - ) - ) + logger.error("Failed to delete an obsolete policy statement: {}".format(policy_response)) else: - logger.debug( - "Failed to load Lambda function policy: {}".format(policy_response) - ) + logger.debug("Failed to load Lambda function policy: {}".format(policy_response)) except ClientError as e: if e.args[0].find("ResourceNotFoundException") > -1: logger.debug("No policy found, must be first run.") @@ -2963,17 +2743,13 @@ def create_event_permission(self, lambda_name, principal, source_arn): Create permissions to link to an event. 
Related: http://docs.aws.amazon.com/lambda/latest/dg/with-s3-example-configure-event-source.html """ - logger.debug( - "Adding new permission to invoke Lambda function: {}".format(lambda_name) - ) + logger.debug("Adding new permission to invoke Lambda function: {}".format(lambda_name)) account_id: str = self.sts_client.get_caller_identity().get("Account") permission_response = self.lambda_client.add_permission( FunctionName=lambda_name, - StatementId="".join( - random.choice(string.ascii_uppercase + string.digits) for _ in range(8) - ), + StatementId="".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8)), Action="lambda:InvokeFunction", Principal=principal, SourceArn=source_arn, @@ -3020,9 +2796,7 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): function = event["function"] expression = event.get("expression", None) # single expression expressions = event.get("expressions", None) # multiple expression - kwargs = event.get( - "kwargs", {} - ) # optional dict of keyword arguments for the event + kwargs = event.get("kwargs", {}) # optional dict of keyword arguments for the event event_source = event.get("event_source", None) description = event.get("description", function) @@ -3034,9 +2808,7 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): self.get_credentials_arn() if expression: - expressions = [ - expression - ] # same code for single and multiple expression + expressions = [expression] # same code for single and multiple expression if expressions: for index, expression in enumerate(expressions): @@ -3056,14 +2828,10 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): ) if "RuleArn" in rule_response: - logger.debug( - "Rule created. ARN {}".format(rule_response["RuleArn"]) - ) + logger.debug("Rule created. ARN {}".format(rule_response["RuleArn"])) # Specific permissions are necessary for any trigger to work. 
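# A minimal sketch (not part of this patch) of the random StatementId built in
# create_event_permission above: eight characters drawn from A-Z and 0-9, so each
# added permission gets a unique statement identifier.
import random
import string

statement_id = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
assert len(statement_id) == 8 and statement_id.isalnum()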
- self.create_event_permission( - lambda_name, "events.amazonaws.com", rule_response["RuleArn"] - ) + self.create_event_permission(lambda_name, "events.amazonaws.com", rule_response["RuleArn"]) # Overwriting the input, supply the original values and add kwargs input_template = ( @@ -3086,10 +2854,7 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): Rule=rule_name, Targets=[ { - "Id": "Id" - + "".join( - random.choice(string.digits) for _ in range(12) - ), + "Id": "Id" + "".join(random.choice(string.digits) for _ in range(12)), "Arn": lambda_arn, "InputTransformer": { "InputPathsMap": { @@ -3110,17 +2875,9 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): ) if target_response["ResponseMetadata"]["HTTPStatusCode"] == 200: - print( - "Scheduled {} with expression {}!".format( - rule_name, expression - ) - ) + print("Scheduled {} with expression {}!".format(rule_name, expression)) else: - print( - "Problem scheduling {} with expression {}.".format( - rule_name, expression - ) - ) + print("Problem scheduling {} with expression {}.".format(rule_name, expression)) elif event_source: service = self.service_from_arn(event_source["arn"]) @@ -3135,30 +2892,16 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): else: svc = service - rule_response = add_event_source( - event_source, lambda_arn, function, self.boto_session - ) + rule_response = add_event_source(event_source, lambda_arn, function, self.boto_session) if rule_response == "successful": print("Created {} event schedule for {}!".format(svc, function)) elif rule_response == "failed": - print( - "Problem creating {} event schedule for {}!".format( - svc, function - ) - ) + print("Problem creating {} event schedule for {}!".format(svc, function)) elif rule_response == "exists": - print( - "{} event schedule for {} already exists - Nothing to do here.".format( - svc, function - ) - ) + print("{} event schedule for {} already exists - Nothing to do here.".format(svc, function)) elif rule_response == "dryrun": - print( - "Dryrun for creating {} event schedule for {}!!".format( - svc, function - ) - ) + print("Dryrun for creating {} event schedule for {}!!".format(svc, function)) else: print( "Could not create event {} - Please define either an expression or an event source".format( @@ -3196,9 +2939,7 @@ def get_event_name(lambda_name, name): """ Returns an AWS-valid Lambda event name. 
""" - return "{prefix:.{width}}-{postfix}".format( - prefix=lambda_name, width=max(0, 63 - len(name)), postfix=name - )[:64] + return "{prefix:.{width}}-{postfix}".format(prefix=lambda_name, width=max(0, 63 - len(name)), postfix=name)[:64] @staticmethod def get_hashed_lambda_name(lambda_name): @@ -3222,15 +2963,11 @@ def delete_rule(self, rule_name): if error_code == "AccessDeniedException": raise else: - logger.debug( - "No target found for this rule: {} {}".format(rule_name, e.args[0]) - ) + logger.debug("No target found for this rule: {} {}".format(rule_name, e.args[0])) return if "Targets" in targets and targets["Targets"]: - self.events_client.remove_targets( - Rule=rule_name, Ids=[x["Id"] for x in targets["Targets"]] - ) + self.events_client.remove_targets(Rule=rule_name, Ids=[x["Id"] for x in targets["Targets"]]) else: # pragma: no cover logger.debug("No target to delete") @@ -3245,9 +2982,7 @@ def get_event_rule_names_for_lambda(self, lambda_arn): rule_names = response["RuleNames"] # Iterate when the results are paginated while "NextToken" in response: - response = self.events_client.list_rule_names_by_target( - TargetArn=lambda_arn, NextToken=response["NextToken"] - ) + response = self.events_client.list_rule_names_by_target(TargetArn=lambda_arn, NextToken=response["NextToken"]) rule_names.extend(response["RuleNames"]) return rule_names @@ -3258,9 +2993,7 @@ def get_event_rules_for_lambda(self, lambda_arn): rule_names = self.get_event_rule_names_for_lambda(lambda_arn=lambda_arn) return [self.events_client.describe_rule(Name=r) for r in rule_names] - def unschedule_events( - self, events, lambda_arn=None, lambda_name=None, excluded_source_services=None - ): + def unschedule_events(self, events, lambda_arn=None, lambda_name=None, excluded_source_services=None): excluded_source_services = excluded_source_services or [] """ Given a list of events, unschedule these CloudWatch Events. @@ -3287,15 +3020,11 @@ def unschedule_events( # re-scheduled when a new Lambda function is deployed. Therefore, they should not be removed during zappa # update or zappa schedule. if service not in excluded_source_services: - remove_event_source( - event_source, lambda_arn, function, self.boto_session - ) + remove_event_source(event_source, lambda_arn, function, self.boto_session) print( "Removed event {}{}.".format( name, - " ({})".format(str(event_source["events"])) - if "events" in event_source - else "", + " ({})".format(str(event_source["events"])) if "events" in event_source else "", ) ) @@ -3311,13 +3040,9 @@ def create_async_sns_topic(self, lambda_name, lambda_arn): # Create SNS topic topic_arn = self.sns_client.create_topic(Name=topic_name)["TopicArn"] # Create subscription - self.sns_client.subscribe( - TopicArn=topic_arn, Protocol="lambda", Endpoint=lambda_arn - ) + self.sns_client.subscribe(TopicArn=topic_arn, Protocol="lambda", Endpoint=lambda_arn) # Add Lambda permission for SNS to invoke function - self.create_event_permission( - lambda_name=lambda_name, principal="sns.amazonaws.com", source_arn=topic_arn - ) + self.create_event_permission(lambda_name=lambda_name, principal="sns.amazonaws.com", source_arn=topic_arn) # Add rule for SNS topic as a event source add_event_source( event_source={"arn": topic_arn, "events": ["sns:Publish"]}, @@ -3395,9 +3120,7 @@ def fetch_logs(self, lambda_name, filter_pattern="", limit=10000, start_time=0): Fetch the CloudWatch logs for a given Lambda name. 
""" log_name = "/aws/lambda/" + lambda_name - streams = self.logs_client.describe_log_streams( - logGroupName=log_name, descending=True, orderBy="LastEventTime" - ) + streams = self.logs_client.describe_log_streams(logGroupName=log_name, descending=True, orderBy="LastEventTime") all_streams = streams["logStreams"] all_names = [stream["logStreamName"] for stream in all_streams] @@ -3450,14 +3173,8 @@ def remove_api_gateway_logs(self, project_name): Removed all logs that are assigned to a given rest api id. """ for rest_api in self.get_rest_apis(project_name): - for stage in self.apigateway_client.get_stages(restApiId=rest_api["id"])[ - "item" - ]: - self.remove_log_group( - "API-Gateway-Execution-Logs_{}/{}".format( - rest_api["id"], stage["stageName"] - ) - ) + for stage in self.apigateway_client.get_stages(restApiId=rest_api["id"])["item"]: + self.remove_log_group("API-Gateway-Execution-Logs_{}/{}".format(rest_api["id"], stage["stageName"])) ## # Route53 Domain Name Entries @@ -3475,21 +3192,11 @@ def get_best_match_zone(all_zones, domain): """Return zone id which name is closer matched with domain name.""" # Related: https://github.com/Miserlou/Zappa/issues/459 - public_zones = [ - zone - for zone in all_zones["HostedZones"] - if not zone["Config"]["PrivateZone"] - ] + public_zones = [zone for zone in all_zones["HostedZones"] if not zone["Config"]["PrivateZone"]] - zones = { - zone["Name"][:-1]: zone["Id"] - for zone in public_zones - if zone["Name"][:-1] in domain - } + zones = {zone["Name"][:-1]: zone["Id"] for zone in public_zones if zone["Name"][:-1] in domain} if zones: - keys = max( - zones.keys(), key=lambda a: len(a) - ) # get longest key -- best match. + keys = max(zones.keys(), key=lambda a: len(a)) # get longest key -- best match. return zones[keys] else: return None @@ -3501,9 +3208,7 @@ def set_dns_challenge_txt(self, zone_id, domain, txt_challenge): print("Setting DNS challenge..") resp = self.route53.change_resource_record_sets( HostedZoneId=zone_id, - ChangeBatch=self.get_dns_challenge_change_batch( - "UPSERT", domain, txt_challenge - ), + ChangeBatch=self.get_dns_challenge_change_batch("UPSERT", domain, txt_challenge), ) return resp @@ -3515,9 +3220,7 @@ def remove_dns_challenge_txt(self, zone_id, domain, txt_challenge): print("Deleting DNS challenge..") resp = self.route53.change_resource_record_sets( HostedZoneId=zone_id, - ChangeBatch=self.get_dns_challenge_change_batch( - "DELETE", domain, txt_challenge - ), + ChangeBatch=self.get_dns_challenge_change_batch("DELETE", domain, txt_challenge), ) return resp @@ -3570,12 +3273,8 @@ def load_credentials(self, boto_session=None, profile_name=None): # If provided, use the supplied profile name. if profile_name: - self.boto_session = boto3.Session( - profile_name=profile_name, region_name=self.aws_region - ) - elif os.environ.get("AWS_ACCESS_KEY_ID") and os.environ.get( - "AWS_SECRET_ACCESS_KEY" - ): + self.boto_session = boto3.Session(profile_name=profile_name, region_name=self.aws_region) + elif os.environ.get("AWS_ACCESS_KEY_ID") and os.environ.get("AWS_SECRET_ACCESS_KEY"): region_name = os.environ.get("AWS_DEFAULT_REGION") or self.aws_region session_kw = { "aws_access_key_id": os.environ.get("AWS_ACCESS_KEY_ID"), @@ -3585,9 +3284,7 @@ def load_credentials(self, boto_session=None, profile_name=None): # If we're executing in a role, AWS_SESSION_TOKEN will be present, too. 
if os.environ.get("AWS_SESSION_TOKEN"): - session_kw["aws_session_token"] = os.environ.get( - "AWS_SESSION_TOKEN" - ) + session_kw["aws_session_token"] = os.environ.get("AWS_SESSION_TOKEN") self.boto_session = boto3.Session(**session_kw) else: diff --git a/zappa/handler.py b/zappa/handler.py index f7c82b635..ed0cc9835 100644 --- a/zappa/handler.py +++ b/zappa/handler.py @@ -20,7 +20,7 @@ from zappa.middleware import ZappaWSGIMiddleware from zappa.utilities import merge_headers, parse_s3_url from zappa.wsgi import common_log, create_wsgi_request -except ImportError as e: # pragma: no cover +except ImportError: # pragma: no cover from .middleware import ZappaWSGIMiddleware from .utilities import merge_headers, parse_s3_url from .wsgi import common_log, create_wsgi_request @@ -103,26 +103,19 @@ def __init__(self, settings_name="zappa_settings", session=None): if is_slim_handler: included_libraries = getattr(self.settings, "INCLUDE", []) try: - from ctypes import cdll, util + from ctypes import cdll for library in included_libraries: try: cdll.LoadLibrary(os.path.join(os.getcwd(), library)) except OSError: - print( - "Failed to find library: {}...right filename?".format( - library - ) - ) + print("Failed to find library: {}...right filename?".format(library)) except ImportError: print("Failed to import cytpes library") # This is a non-WSGI application # https://github.com/Miserlou/Zappa/pull/748 - if ( - not hasattr(self.settings, "APP_MODULE") - and not self.settings.DJANGO_SETTINGS - ): + if not hasattr(self.settings, "APP_MODULE") and not self.settings.DJANGO_SETTINGS: self.app_module = None wsgi_app_function = None # This is probably a normal WSGI app (Or django with overloaded wsgi application) @@ -136,9 +129,7 @@ def __init__(self, settings_name="zappa_settings", session=None): # add the Lambda root path into the sys.path self.trailing_slash = True - os.environ[ - SETTINGS_ENVIRONMENT_VARIABLE - ] = self.settings.DJANGO_SETTINGS + os.environ[SETTINGS_ENVIRONMENT_VARIABLE] = self.settings.DJANGO_SETTINGS else: self.trailing_slash = False @@ -283,9 +274,7 @@ def run_function(app_function, event, context): # getargspec does not support python 3 method with type hints # Related issue: https://github.com/Miserlou/Zappa/issues/1452 if hasattr(inspect, "getfullargspec"): # Python 3 - args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec( - app_function - ) + args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec(app_function) else: # Python 2 args, varargs, keywords, defaults = inspect.getargspec(app_function) num_args = len(args) @@ -297,8 +286,7 @@ def run_function(app_function, event, context): result = app_function(event, context) else: raise RuntimeError( - "Function signature is invalid. Expected a function that accepts at most " - "2 arguments or varargs." + "Function signature is invalid. Expected a function that accepts at most " "2 arguments or varargs." 
) return result @@ -341,9 +329,7 @@ def get_function_from_bot_intent_trigger(self, event): if intent: intent = intent.get("name") if intent: - return self.settings.AWS_BOT_EVENT_MAPPING.get( - "{}:{}".format(intent, event.get("invocationSource")) - ) + return self.settings.AWS_BOT_EVENT_MAPPING.get("{}:{}".format(intent, event.get("invocationSource"))) def get_function_for_cognito_trigger(self, trigger): """ @@ -417,7 +403,7 @@ def handler(self, event, context): try: # Support both for tests from zappa.ext.django_zappa import get_django_wsgi - except ImportError as e: # pragma: no cover + except ImportError: # pragma: no cover from django_zappa_app import get_django_wsgi # Get the Django WSGI app from our extension @@ -464,9 +450,7 @@ def handler(self, event, context): policy = self.run_function(app_function, event, context) return policy else: - logger.error( - "Cannot find a function to process the authorization request." - ) + logger.error("Cannot find a function to process the authorization request.") raise Exception("Unauthorized") # This is an AWS Cognito Trigger Event @@ -479,11 +463,7 @@ def handler(self, event, context): result = self.run_function(app_function, event, context) logger.debug(result) else: - logger.error( - "Cannot find a function to handle cognito trigger {}".format( - triggerSource - ) - ) + logger.error("Cannot find a function to handle cognito trigger {}".format(triggerSource)) return result # This is a CloudWatch event @@ -510,9 +490,7 @@ def handler(self, event, context): script_name = "" is_elb_context = False headers = merge_headers(event) - if event.get("requestContext", None) and event["requestContext"].get( - "elb", None - ): + if event.get("requestContext", None) and event["requestContext"].get("elb", None): # Related: https://github.com/Miserlou/Zappa/issues/1715 # inputs/outputs for lambda loadbalancer # https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html @@ -576,9 +554,7 @@ def handler(self, event, context): # base64 encoding and status description if is_elb_context: zappa_returndict.setdefault("isBase64Encoded", False) - zappa_returndict.setdefault( - "statusDescription", response.status - ) + zappa_returndict.setdefault("statusDescription", response.status) if response.data: if ( @@ -586,9 +562,7 @@ def handler(self, event, context): and not response.mimetype.startswith("text/") and response.mimetype != "application/json" ): - zappa_returndict["body"] = base64.b64encode( - response.data - ).decode("utf-8") + zappa_returndict["body"] = base64.b64encode(response.data).decode("utf-8") zappa_returndict["isBase64Encoded"] = True else: zappa_returndict["body"] = response.get_data(as_text=True) @@ -601,9 +575,7 @@ def handler(self, event, context): if "multiValueHeaders" in event: zappa_returndict["multiValueHeaders"] = {} for key, value in response.headers: - zappa_returndict["multiValueHeaders"][ - key - ] = response.headers.getlist(key) + zappa_returndict["multiValueHeaders"][key] = response.headers.getlist(key) # Calculate the total response time, # and log it in the Common Log format. @@ -644,9 +616,7 @@ def handler(self, event, context): content["statusCode"] = 500 body = {"message": message} if settings.DEBUG: # only include traceback if debug is on. - body["traceback"] = traceback.format_exception( - *exc_info - ) # traceback as a list for readability. + body["traceback"] = traceback.format_exception(*exc_info) # traceback as a list for readability. 
content["body"] = json.dumps(str(body), sort_keys=True, indent=4) return content @@ -657,9 +627,7 @@ def lambda_handler(event, context): # pragma: no cover def keep_warm_callback(event, context): """Method is triggered by the CloudWatch event scheduled when keep_warm setting is set to true.""" - lambda_handler( - event={}, context=context - ) # overriding event with an empty one so that web app initialization will + lambda_handler(event={}, context=context) # overriding event with an empty one so that web app initialization will # be triggered. diff --git a/zappa/letsencrypt.py b/zappa/letsencrypt.py index 9ecd63f02..671e45d14 100755 --- a/zappa/letsencrypt.py +++ b/zappa/letsencrypt.py @@ -21,7 +21,6 @@ import re import shutil import subprocess -import sys import tempfile import textwrap import time @@ -80,7 +79,10 @@ def get_cert_and_update_domain( stage=api_stage, ) print( - "Created a new domain name. Please note that it can take up to 40 minutes for this domain to be created and propagated through AWS, but it requires no further work on your part." + "Created a new domain name. " + "Please note that it can take up to 40 minutes " + "for this domain to be created and propagated through AWS, " + "but it requires no further work on your part." ) else: zappa_instance.update_domain_name( @@ -213,9 +215,7 @@ def get_boulder_header(key_bytes): "jwk": { "e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))), "kty": "RSA", - "n": _b64( - binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8")) - ), + "n": _b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))), }, } @@ -270,15 +270,9 @@ def get_cert(zappa_instance, log=LOGGER, CA=DEFAULT_CA): }, ) if code != 201: - raise ValueError( - "Error requesting challenges: {0} {1}".format(code, result) - ) - - challenge = [ - ch - for ch in json.loads(result.decode("utf8"))["challenges"] - if ch["type"] == "dns-01" - ][0] + raise ValueError("Error requesting challenges: {0} {1}".format(code, result)) + + challenge = [ch for ch in json.loads(result.decode("utf8"))["challenges"] if ch["type"] == "dns-01"][0] token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge["token"]) keyauthorization = "{0}.{1}".format(token, thumbprint).encode("utf-8") @@ -330,20 +324,14 @@ def verify_challenge(uri): resp = urlopen(uri) challenge_status = json.loads(resp.read().decode("utf8")) except IOError as e: - raise ValueError( - "Error checking challenge: {0} {1}".format( - e.code, json.loads(e.read().decode("utf8")) - ) - ) + raise ValueError("Error checking challenge: {0} {1}".format(e.code, json.loads(e.read().decode("utf8")))) if challenge_status["status"] == "pending": time.sleep(2) elif challenge_status["status"] == "valid": LOGGER.info("Domain verified!") break else: - raise ValueError( - "Domain challenge did not pass: {0}".format(challenge_status) - ) + raise ValueError("Domain challenge did not pass: {0}".format(challenge_status)) def sign_certificate(): @@ -381,10 +369,8 @@ def encode_certificate(result): """ Encode cert bytes to PEM encoded cert file. 
""" - cert_body = ( - """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format( - "\n".join(textwrap.wrap(base64.b64encode(result).decode("utf8"), 64)) - ) + cert_body = """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format( + "\n".join(textwrap.wrap(base64.b64encode(result).decode("utf8"), 64)) ) signed_crt = open("{}/signed.crt".format(gettempdir()), "w") signed_crt.write(cert_body) @@ -424,9 +410,7 @@ def _send_signed_request(url, payload): "-sign", os.path.join(gettempdir(), "account.key"), ] - proc = subprocess.Popen( - cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) + proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode("utf8")) if proc.returncode != 0: # pragma: no cover raise IOError("OpenSSL Error: {0}".format(err)) diff --git a/zappa/middleware.py b/zappa/middleware.py index 2f7bb2bfe..920db0c94 100644 --- a/zappa/middleware.py +++ b/zappa/middleware.py @@ -44,11 +44,7 @@ def encode_response(status, headers, exc_info=None): Related: https://github.com/Miserlou/Zappa/issues/1965 """ - new_headers = [ - header - for header in headers - if ((type(header[0]) != str) or (header[0].lower() != "set-cookie")) - ] + new_headers = [header for header in headers if ((type(header[0]) != str) or (header[0].lower() != "set-cookie"))] cookie_headers = [ (header[0].lower(), header[1]) for header in headers diff --git a/zappa/utilities.py b/zappa/utilities.py index d146971e7..72ad9f0f7 100644 --- a/zappa/utilities.py +++ b/zappa/utilities.py @@ -13,7 +13,6 @@ import botocore import durationpy -from past.builtins import basestring LOG = logging.getLogger(__name__) @@ -44,7 +43,7 @@ def copy_file(src, dst, item): st = os.lstat(s) mode = stat.S_IMODE(st.st_mode) os.lchmod(d, mode) - except: + except Exception: pass # lchmod not available elif os.path.isdir(s): copytree(s, d, metadata, symlinks, ignore) @@ -105,16 +104,14 @@ def string_to_timestamp(timestring): # Uses an extended version of Go's duration string. try: delta = durationpy.from_str(timestring) - past = datetime.datetime.utcnow() - delta + past = datetime.datetime.now(datetime.timezone.utc) - delta ts = calendar.timegm(past.timetuple()) return ts - except Exception as e: + except Exception: pass if ts: return ts - # else: - # print("Unable to parse timestring.") return 0 @@ -137,9 +134,7 @@ def detect_django_settings(): continue full = os.path.join(root, filename) package_path = full.replace(os.getcwd(), "") - package_module = ( - package_path.replace(os.sep, ".").split(".", 1)[1].replace(".py", "") - ) + package_module = package_path.replace(os.sep, ".").split(".", 1)[1].replace(".py", "") matches.append(package_module) return matches @@ -175,11 +170,7 @@ def detect_flask_apps(): continue package_path = full.replace(os.getcwd(), "") - package_module = ( - package_path.replace(os.sep, ".") - .split(".", 1)[1] - .replace(".py", "") - ) + package_module = package_path.replace(os.sep, ".").split(".", 1)[1].replace(".py", "") app_module = package_module + "." 
+ app matches.append(app_module) @@ -196,9 +187,7 @@ def get_runtime_from_python_version(): if sys.version_info[0] < 3: raise ValueError("Python 2.x is no longer supported.") else: - if sys.version_info[1] <= 6: - return "python3.6" - elif sys.version_info[1] <= 7: + if sys.version_info[1] <= 7: return "python3.7" elif sys.version_info[1] <= 8: return "python3.8" @@ -221,9 +210,7 @@ def get_topic_name(lambda_name): ## -def get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary item, a session and a lambda_arn, @@ -335,9 +322,7 @@ def status(self, function): uuid = self._get_uuid(function) if uuid: try: - response = self._lambda.call( - "get_event_source_mapping", UUID=self._get_uuid(function) - ) + response = self._lambda.call("get_event_source_mapping", UUID=self._get_uuid(function)) LOG.debug(response) except botocore.exceptions.ClientError: LOG.debug("event source %s does not exist", self.arn) @@ -363,9 +348,7 @@ def add_filters(self, function): ) kappa.event_source.sns.LOG.debug(response) except Exception: - kappa.event_source.sns.LOG.exception( - "Unable to add filters for SNS topic %s", self.arn - ) + kappa.event_source.sns.LOG.exception("Unable to add filters for SNS topic %s", self.arn) def add(self, function): super().add(function) @@ -424,16 +407,12 @@ def autoreturn(self, function_name): return event_source_obj, ctx, funk -def add_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def add_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary, create the object and add the event source. """ - event_source_obj, ctx, funk = get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False - ) + event_source_obj, ctx, funk = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) # TODO: Detect changes in config and refine exists algorithm if not dry: if not event_source_obj.status(funk): @@ -445,16 +424,12 @@ def add_event_source( return "dryrun" -def remove_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def remove_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary, create the object and remove the event source. """ - event_source_obj, ctx, funk = get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False - ) + event_source_obj, ctx, funk = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) # This is slightly dirty, but necessary for using Kappa this way. funk.arn = lambda_arn @@ -465,16 +440,12 @@ def remove_event_source( return event_source_obj -def get_event_source_status( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def get_event_source_status(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary, create the object and get the event source status. 
""" - event_source_obj, ctx, funk = get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False - ) + event_source_obj, ctx, funk = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) return event_source_obj.status(funk) @@ -520,7 +491,7 @@ def validate_name(name, maxlen=80): Return: the name Raise: InvalidAwsLambdaName, if the name is invalid. """ - if not isinstance(name, basestring): + if not isinstance(name, str): msg = "Name must be of type string" raise InvalidAwsLambdaName(msg) if len(name) > maxlen: @@ -540,20 +511,12 @@ def contains_python_files_or_subdirs(folder): Checks (recursively) if the directory contains .py or .pyc files """ for root, dirs, files in os.walk(folder): - if [ - filename - for filename in files - if filename.endswith(".py") or filename.endswith(".pyc") - ]: + if [filename for filename in files if filename.endswith(".py") or filename.endswith(".pyc")]: return True for d in dirs: for _, subdirs, subfiles in os.walk(d): - if [ - filename - for filename in subfiles - if filename.endswith(".py") or filename.endswith(".pyc") - ]: + if [filename for filename in subfiles if filename.endswith(".py") or filename.endswith(".pyc")]: return True return False @@ -580,7 +543,8 @@ def titlecase_keys(d): # https://github.com/Miserlou/Zappa/issues/1688 def is_valid_bucket_name(name): """ - Checks if an S3 bucket name is valid according to https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html#bucketnamingrules + Checks if an S3 bucket name is valid according to: + https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html#bucketnamingrules """ # Bucket names must be at least 3 and no more than 63 characters long. if len(name) < 3 or len(name) > 63: diff --git a/zappa/wsgi.py b/zappa/wsgi.py index 63880e3fa..d2300bc3b 100644 --- a/zappa/wsgi.py +++ b/zappa/wsgi.py @@ -26,9 +26,7 @@ def create_wsgi_request( create and return a valid WSGI request environ. """ method = event_info["httpMethod"] - headers = ( - merge_headers(event_info) or {} - ) # Allow for the AGW console 'Test' button to work (Pull #735) + headers = merge_headers(event_info) or {} # Allow for the AGW console 'Test' button to work (Pull #735) """ API Gateway and ALB both started allowing for multi-value querystring