diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f03ed8dba3f7..e3cc7ec5f5001 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -551,12 +551,12 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Install and test provider packages and airflow via wheel files" run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh env: - INSTALL_AIRFLOW_VERSION: "wheel" + USE_AIRFLOW_VERSION: "wheel" PACKAGE_FORMAT: "wheel" - name: "Install and test provider packages and airflow on Airflow 2.0 files" run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh env: - INSTALL_AIRFLOW_VERSION: "2.0.0" + USE_AIRFLOW_VERSION: "2.0.0" PACKAGE_FORMAT: "wheel" prepare-test-provider-packages-sdist: @@ -597,7 +597,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Install and test provider packages and airflow via sdist files" run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh env: - INSTALL_AIRFLOW_VERSION: "sdist" + USE_AIRFLOW_VERSION: "sdist" PACKAGE_FORMAT: "sdist" tests-helm: diff --git a/BREEZE.rst b/BREEZE.rst index ce5f6e5d4cb8f..4916b4ef63faf 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -1250,6 +1250,8 @@ This is the current syntax for `./breeze <./breeze>`_: 'breeze \ --github-image-id 209845560' - pull/use image with RUN_ID + Most flags are applicable to the shell command as it will run build when needed. + #################################################################################################### @@ -1310,41 +1312,21 @@ This is the current syntax for `./breeze <./breeze>`_: 2.7 3.5 3.6 3.7 3.8 -a, --install-airflow-version INSTALL_AIRFLOW_VERSION - In CI image, installs Airflow (in entrypoint) from PIP released version or using - the installation method specified (sdist, wheel, none). - - In PROD image the installation of selected method or version happens during image building. - For PROD image, the 'none' options is not valid. 
- - One of: + Uses different version of Airflow when building PROD image. - 2.0.1 2.0.0 1.10.15 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist - - When 'none' is used, you can install airflow from local packages. When building image, - airflow package should be added to 'docker-context-files' and - --install-from-docker-context-files flag should be used. When running an image, airflow - package should be added to dist folder and --install-packages-from-dist flag should be used. + 2.0.1 2.0.0 1.10.15 1.10.14 wheel sdist -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE - If specified, installs Airflow directly from reference in GitHub. This happens at - image building time in production image and at container entering time for CI image. + Installs Airflow directly from reference in GitHub when building PROD image. This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1. --installation-method INSTALLATION_METHOD - Method of installing airflow for production image - either from the sources ('.') + Method of installing Airflow in PROD image - either from the sources ('.') or from package 'apache-airflow' to install from PyPI. Default in Breeze is to install from sources. One of: . apache-airflow - --no-rbac-ui - Disables RBAC UI when Airflow 1.10.* is installed. - - --install-packages-from-dist - If specified it will look for packages placed in dist folder and it will install the - packages after installing Airflow. This is useful for testing provider - packages. - --upgrade-to-newer-dependencies Upgrades PIP packages to latest versions available without looking at the constraints. @@ -1392,12 +1374,12 @@ This is the current syntax for `./breeze <./breeze>`_: are needed by the extras. When you build image during the development (which is default in Breeze) all providers are installed by default from sources. 
You can disable it by adding this flag but then you have to install providers from - wheel packages via --install-packages-from-dist flag. + wheel packages via --use-packages-from-dist flag. --disable-pypi-when-building Disable installing Airflow from pypi when building. If you use this flag and want to install Airflow, you have to install it from packages placed in - 'docker-context-files' and use --install-from-local-files-when-building flag. + 'docker-context-files' and use --install-from-docker-context-files flag. --additional-extras ADDITIONAL_EXTRAS Additional extras to pass to build images The default is no additional extras. @@ -1452,7 +1434,7 @@ This is the current syntax for `./breeze <./breeze>`_: --disable-pip-cache Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build. - --install-from-local-files-when-building + --install-from-docker-context-files This flag is used during image building. If it is used additionally to installing Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed in the 'docker-context-files' folder. The same flag can be used during entering the image in @@ -1839,6 +1821,22 @@ This is the current syntax for `./breeze <./breeze>`_: Flags: + --use-airflow-version AIRFLOW_SPECIFICATION + In CI image, installs Airflow at runtime from PIP released version or using + the installation method specified (sdist, wheel, none). When 'none' is used, + airflow is just removed. In this case airflow package should be added to dist folder + and --use-packages-from-dist flag should be used. + + 2.0.1 2.0.0 1.10.15 1.10.14 wheel sdist none + + --use-packages-from-dist + In CI image, if specified it will look for packages placed in dist folder and + it will install the packages after entering the image. + This is useful for testing provider packages. + + --no-rbac-ui + Disables RBAC UI when Airflow 1.10.* is installed. + --load-example-dags Include Airflow example dags. 
@@ -2043,12 +2041,12 @@ This is the current syntax for `./breeze <./breeze>`_: are needed by the extras. When you build image during the development (which is default in Breeze) all providers are installed by default from sources. You can disable it by adding this flag but then you have to install providers from - wheel packages via --install-packages-from-dist flag. + wheel packages via --use-packages-from-dist flag. --disable-pypi-when-building Disable installing Airflow from pypi when building. If you use this flag and want to install Airflow, you have to install it from packages placed in - 'docker-context-files' and use --install-from-local-files-when-building flag. + 'docker-context-files' and use --install-from-docker-context-files flag. --additional-extras ADDITIONAL_EXTRAS Additional extras to pass to build images The default is no additional extras. @@ -2103,7 +2101,7 @@ This is the current syntax for `./breeze <./breeze>`_: --disable-pip-cache Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build. - --install-from-local-files-when-building + --install-from-docker-context-files This flag is used during image building. If it is used additionally to installing Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed in the 'docker-context-files' folder. The same flag can be used during entering the image in @@ -2526,50 +2524,49 @@ This is the current syntax for `./breeze <./breeze>`_: Assume 'quit' answer to all questions. **************************************************************************************************** - Choose different Airflow version to install or run + Install different Airflow version during PROD image build -a, --install-airflow-version INSTALL_AIRFLOW_VERSION - In CI image, installs Airflow (in entrypoint) from PIP released version or using - the installation method specified (sdist, wheel, none). + Uses different version of Airflow when building PROD image. 
- In PROD image the installation of selected method or version happens during image building. - For PROD image, the 'none' options is not valid. - - One of: - - 2.0.1 2.0.0 1.10.15 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist - - When 'none' is used, you can install airflow from local packages. When building image, - airflow package should be added to 'docker-context-files' and - --install-from-docker-context-files flag should be used. When running an image, airflow - package should be added to dist folder and --install-packages-from-dist flag should be used. + 2.0.1 2.0.0 1.10.15 1.10.14 wheel sdist -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE - If specified, installs Airflow directly from reference in GitHub. This happens at - image building time in production image and at container entering time for CI image. + Installs Airflow directly from reference in GitHub when building PROD image. This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1. --installation-method INSTALLATION_METHOD - Method of installing airflow for production image - either from the sources ('.') + Method of installing Airflow in PROD image - either from the sources ('.') or from package 'apache-airflow' to install from PyPI. Default in Breeze is to install from sources. One of: . apache-airflow - --no-rbac-ui - Disables RBAC UI when Airflow 1.10.* is installed. - - --install-packages-from-dist - If specified it will look for packages placed in dist folder and it will install the - packages after installing Airflow. This is useful for testing provider - packages. - --upgrade-to-newer-dependencies Upgrades PIP packages to latest versions available without looking at the constraints. --continue-on-pip-check-failure Continue even if 'pip check' fails. 
+ **************************************************************************************************** + Use different Airflow version at runtime in CI image + + --use-airflow-version AIRFLOW_SPECIFICATION + In CI image, installs Airflow at runtime from PIP released version or using + the installation method specified (sdist, wheel, none). When 'none' is used, + airflow is just removed. In this case airflow package should be added to dist folder + and --use-packages-from-dist flag should be used. + + 2.0.1 2.0.0 1.10.15 1.10.14 wheel sdist none + + --use-packages-from-dist + In CI image, if specified it will look for packages placed in dist folder and + it will install the packages after entering the image. + This is useful for testing provider packages. + + --no-rbac-ui + Disables RBAC UI when Airflow 1.10.* is installed. + **************************************************************************************************** Credentials @@ -2618,12 +2615,12 @@ This is the current syntax for `./breeze <./breeze>`_: are needed by the extras. When you build image during the development (which is default in Breeze) all providers are installed by default from sources. You can disable it by adding this flag but then you have to install providers from - wheel packages via --install-packages-from-dist flag. + wheel packages via --use-packages-from-dist flag. --disable-pypi-when-building Disable installing Airflow from pypi when building. If you use this flag and want to install Airflow, you have to install it from packages placed in - 'docker-context-files' and use --install-from-local-files-when-building flag. + 'docker-context-files' and use --install-from-docker-context-files flag. --additional-extras ADDITIONAL_EXTRAS Additional extras to pass to build images The default is no additional extras. @@ -2678,7 +2675,7 @@ This is the current syntax for `./breeze <./breeze>`_: --disable-pip-cache Disables GitHub PIP cache during the build. 
Useful if GitHub is not reachable during build. - --install-from-local-files-when-building + --install-from-docker-context-files This flag is used during image building. If it is used additionally to installing Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed in the 'docker-context-files' folder. The same flag can be used during entering the image in diff --git a/CI.rst b/CI.rst index 7b06a2f1b61cc..529a8a37c2d33 100644 --- a/CI.rst +++ b/CI.rst @@ -253,16 +253,6 @@ You can use those variables when you try to reproduce the build locally. +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | ``HOST_HOME`` | | | | Home directory on the host. | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ -| Image variables | -+-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ -| ``INSTALL_AIRFLOW_VERSION`` | | | | Installs Airflow version from PyPI when | -| | | | | building image. Can be "none" to skip airflow | -| | | | | installation so that it can be installed from | -| | | | | locally prepared packages. | -+-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ -| ``INSTALL_AIRFLOW_REFERENCE`` | | | | Installs Airflow version from GitHub | -| | | | | branch or tag. 
| -+-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | Version suffix variables | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | ``VERSION_SUFFIX_FOR_PYPI`` | | | | Version suffix used during provider | diff --git a/Dockerfile.ci b/Dockerfile.ci index 46f0b36f9009f..0f93b523b8f6f 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -260,9 +260,6 @@ ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} ARG INSTALL_PROVIDERS_FROM_SOURCES="true" ENV INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES} -ARG INSTALL_FROM_DOCKER_CONTEXT_FILES="" -ENV INSTALL_FROM_DOCKER_CONTEXT_FILES=${INSTALL_FROM_DOCKER_CONTEXT_FILES} - ARG INSTALL_FROM_PYPI="true" ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI} @@ -367,17 +364,6 @@ RUN if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \ bash /scripts/docker/install_airflow.sh; \ fi -# Only copy install_from_docker_context_files.sh to not invalidate cache on other script changes -COPY scripts/docker/install_from_docker_context_files.sh /scripts/docker/install_from_docker_context_files.sh - -# If wheel files are found in /docker-context-files during installation -# they are also installed additionally to whatever is installed from Airflow. -COPY docker-context-files/ /docker-context-files/ - -RUN if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \ - bash /scripts/docker/install_from_docker_context_files.sh; \ - fi - # Copy all the www/ files we need to compile assets. Done as two separate COPY # commands so as otherwise it copies the _contents_ of static/ in to www/ COPY airflow/www/webpack.config.js ${AIRFLOW_SOURCES}/airflow/www/ diff --git a/IMAGES.rst b/IMAGES.rst index b206a4816a827..0f01b68030e56 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -17,10 +17,10 @@ .. 
contents:: :local: -Airflow docker images +Airflow Docker images ===================== -Airflow has two images (build from Dockerfiles): +Airflow has two main images (build from Dockerfiles): * Production image (Dockerfile) - that can be used to build your own production-ready Airflow installation You can read more about building and using the production image in the @@ -30,6 +30,35 @@ Airflow has two images (build from Dockerfiles): * CI image (Dockerfile.ci) - used for running tests and local development. The image is built using `Dockerfile.ci `_ +PROD image +----------- + +The PROD image is a multi-segment image. The first segment "airflow-build-image" contains all the +build essentials and related dependencies that allow to install airflow locally. By default the image is +build from a released version of Airflow from GitHub, but by providing some extra arguments you can also +build it from local sources. This is particularly useful in CI environment where we are using the image +to run Kubernetes tests. See below for the list of arguments that should be provided to build +production image from the local sources. + +The image is primarily optimised for size of the final image, but also for speed of rebuilds - the +'airflow-build-image' segment uses the same technique as the CI builds for pre-installing PIP dependencies. +It first pre-installs them from the right GitHub branch and only after that final airflow installation is +done from either local sources or remote location (PIP or GitHub repository). + +You can read more details about building, extending and customizing the PROD image in the +`Latest documentation `_ + + +CI image +-------- + +The CI image is used by `Breeze `_ as shell image but it is also used during CI build. +The image is single segment image that contains Airflow installation with "all" dependencies installed. +It is optimised for rebuild speed. 
It installs PIP dependencies from the current branch first - +so that any changes in setup.py do not trigger reinstalling of all dependencies. +There is a second step of installation that re-installs the dependencies +from the latest sources so that we are sure that latest dependencies are installed. + Image naming conventions ======================== @@ -59,23 +88,36 @@ Since those are simply snapshots of the existing Python images, DockerHub does n copy of those images - all layers are mounted from the original Python images and those are merely labels pointing to those. -Building docker images -====================== +Building docker images from current sources +=========================================== + +The easy way to build the CI/PROD images is to use ``_. It uses a number of optimization +and caches to build it efficiently and fast when you are developing Airflow and need to update to +latest version. + +CI image, airflow package is always built from sources. When you execute the image, you can however use +the ``--use-airflow-version`` flag (or ``USE_AIRFLOW_VERSION`` environment variable) to remove +the preinstalled source version of Airflow and replace it with one of the possible installation methods: -The easiest way to build those images is to use ``_. +* "none" airflow is removed and not installed +* "wheel" airflow is removed and replaced with "wheel" version available in dist +* "sdist" airflow is removed and replaced with "sdist" version available in dist +* "" airflow is removed and installed from PyPI (with the specified version) -Note! Breeze by default builds production image from local sources. You can change it's behaviour by -providing ``--install-airflow-version`` parameter, where you can specify the -tag/branch used to download Airflow package from in GitHub repository. You can -also change the repository itself by adding ``--dockerhub-user`` and ``--dockerhub-repo`` flag values. 
+For PROD image by default production image is built from the latest sources when using Breeze, but when +you use it via docker build command, it uses the latest installed version of airflow and providers. +However, you can choose different installation methods as described in +`Building PROD docker images from released PIP packages <#building-prod-docker-images-from-released-packages>`_. +Detailed reference for building production image from different sources can be found in: +`Build Args reference `_ -You can build the CI image using this command: +You can build the CI image using current sources this command: .. code-block:: bash ./breeze build-image -You can build production image using this command: +You can build the PROD image using current sources with this command: .. code-block:: bash @@ -109,7 +151,8 @@ PIP dependencies) and will give you an image consistent with the one used during The command that builds the production image is optimised for size of the image. -In Breeze by default, the airflow is installed using local sources of Apache Airflow. +Building PROD docker images from released PIP packages +====================================================== You can also build production images from PIP packages via providing ``--install-airflow-version`` parameter to Breeze: @@ -119,6 +162,13 @@ parameter to Breeze: ./breeze build-image --python 3.7 --additional-extras=trino \ --production-image --install-airflow-version=2.0.0 +This will build the image using command similar to: + +.. code-block:: bash + + pip install \ + apache-airflow[async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.6.txt" .. note:: @@ -141,13 +191,6 @@ parameter to Breeze: them to appropriate format and workflow that your tool requires. 
-This will build the image using command similar to: - -.. code-block:: bash - - pip install \ - apache-airflow[async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.6.txt" You can also build production images from specific Git version via providing ``--install-airflow-reference`` parameter to Breeze (this time constraints are taken from the ``constraints-master`` branch which is the @@ -159,16 +202,15 @@ HEAD of development for constraints): --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" You can also skip installing airflow and install it from locally provided files by using -``--install-from-local-files-when-building`` parameter and ``--disable-pypi-when-building`` to Breeze: +``--install-from-docker-context-files`` parameter and ``--disable-pypi-when-building`` to Breeze: .. code-block:: bash ./breeze build-image --python 3.7 --additional-extras=trino \ - --production-image --disable-pypi-when-building --install-from-local-files-when-building + --production-image --disable-pypi-when-building --install-from-docker-context-files In this case you airflow and all packages (.whl files) should be placed in ``docker-context-files`` folder. - Using cache during builds ========================= @@ -408,36 +450,13 @@ run with enabled Kerberos integration (assuming docker.pkg.github.com was used a You can see more details and examples in `Breeze `_ +Customizing the CI image +======================== -Technical details of Airflow images -=================================== - -The CI image is used by Breeze as shell image but it is also used during CI build. -The image is single segment image that contains Airflow installation with "all" dependencies installed. 
-It is optimised for rebuild speed. It installs PIP dependencies from the current branch first - -so that any changes in setup.py do not trigger reinstalling of all dependencies. -There is a second step of installation that re-installs the dependencies -from the latest sources so that we are sure that latest dependencies are installed. - -The production image is a multi-segment image. The first segment "airflow-build-image" contains all the -build essentials and related dependencies that allow to install airflow locally. By default the image is -build from a released version of Airflow from GitHub, but by providing some extra arguments you can also -build it from local sources. This is particularly useful in CI environment where we are using the image -to run Kubernetes tests. See below for the list of arguments that should be provided to build -production image from the local sources. - -The image is primarily optimised for size of the final image, but also for speed of rebuilds - the -'airflow-build-image' segment uses the same technique as the CI builds for pre-installing PIP dependencies. -It first pre-installs them from the right GitHub branch and only after that final airflow installation is -done from either local sources or remote location (PIP or GitHub repository). - -Customizing the image ---------------------- - -Customizing the image is an alternative way of adding your own dependencies to the image. +Customizing the CI image allows to add your own dependencies to the image. -The easiest way to build the image is to use ``breeze`` script, but you can also build such customized -image by running appropriately crafted docker build in which you specify all the ``build-args`` +The easiest way to build the customized image is to use ``breeze`` script, but you can also build such +customized image by running appropriately crafted docker build in which you specify all the ``build-args`` that you need to add to customize it. 
You can read about all the args and ways you can build the image in the `<#ci-image-build-arguments>`_ chapter below. @@ -450,11 +469,6 @@ additional apt dev and runtime dependencies. docker build . -f Dockerfile.ci \ --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ - --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ - --build-arg AIRFLOW_VERSION="2.0.0" \ - --build-arg AIRFLOW_VERSION_SPECIFICATION="==2.0.0" \ - --build-arg AIRFLOW_SOURCES_FROM="empty" \ - --build-arg AIRFLOW_SOURCES_TO="/empty" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" --build-arg ADDITIONAL_PYTHON_DEPS="pandas" --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" @@ -467,12 +481,9 @@ the same image can be built using ``breeze`` (it supports auto-completion of the .. code-block:: bash ./breeze build-image -f Dockerfile.ci \ - --production-image --python 3.7 --install-airflow-version=2.0.0 \ + --production-image --python 3.7 \ --additional-extras=jdbc --additional-python-deps="pandas" \ --additional-dev-apt-deps="gcc g++" --additional-runtime-apt-deps="default-jre-headless" -You can build the default production image with standard ``docker build`` command but they will only build -default versions of the image and will not use the dockerhub versions of images as cache. - You can customize more aspects of the image - such as additional commands executed before apt dependencies are installed, or adding extra sources to install your dependencies from. You can see all the arguments @@ -484,10 +495,6 @@ based on example in `this comment `_ - - -Image manifests ---------------- +CI Image manifests +------------------ Together with the main CI images we also build and push image manifests. Those manifests are very small images that contain only content of randomly generated file at the 'crucial' part of the CI image building. @@ -750,6 +707,9 @@ the repo as this will likely be faster than rebuilding the image locally. 
The random UUID is generated right after pre-cached pip install is run - and usually it means that significant changes have been made to apt packages or even the base Python image has changed. +Working with the images +======================= + Pulling the Latest Images ------------------------- @@ -771,7 +731,7 @@ however uou can also force it with the same flag. ./breeze build-image --force-pull-images Refreshing Base Python images -============================= +----------------------------- Python base images are updated from time-to-time, usually as a result of implementing security fixes. When you build your image locally using ``docker build`` you use the version of image that you have locally. @@ -802,18 +762,8 @@ GitHub Registies in order to be able to do that. ./breeze push-image --github-registry docker.pkg.github.com --production-image done - - - -Embedded image scripts -====================== - -Both images have a set of scripts that can be used in the image. Those are: - * /entrypoint - entrypoint script used when entering the image - * /clean-logs - script for periodic log cleaning - Running the CI image -==================== +-------------------- The entrypoint in the CI image contains all the initialisation needed for tests to be immediately executed. It is copied from ``scripts/in_container/entrypoint_ci.sh``. @@ -826,8 +776,8 @@ The entrypoint performs those operations: * checks if the environment is ready to test (including database and all integrations). It waits until all the components are ready to work -* installs older version of Airflow (if older version of Airflow is requested to be installed - via ``INSTALL_AIRFLOW_VERSION`` variable. +* removes and re-installs another version of Airflow (if another version of Airflow is requested to be + reinstalled via ``USE_AIRFLOW_VERSION`` variable. 
* Sets up Kerberos if Kerberos integration is enabled (generates and configures Kerberos token) @@ -846,10 +796,3 @@ The entrypoint performs those operations: * Sets default "tests" target in case the target is not explicitly set as additional argument * Runs system tests if RUN_SYSTEM_TESTS flag is specified, otherwise runs regular unit and integration tests - - -Using, customising, and extending the production image -====================================================== - -You can read more about using, customising, and extending the production image in the -`documentation `_. diff --git a/TESTING.rst b/TESTING.rst index 766be56e29238..e306b8db94bcf 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -545,7 +545,7 @@ This installs airflow and enters .. code-block:: bash - ./breeze --install-airflow-version wheel --install-packages-from-dist --skip-mounting-local-sources + ./breeze --use-airflow-version wheel --use-packages-from-dist --skip-mounting-local-sources @@ -1277,13 +1277,11 @@ The DAGs can be run in the master version of Airflow but they also work with older versions. To run the tests for Airflow 1.10.* series, you need to run Breeze with -``--install-airflow-version==`` to install a different version of Airflow. -If ``current`` is specified (default), then the current version of Airflow is used. -Otherwise, the released version of Airflow is installed. +``--use-airflow-version=`` to re-install a different version of Airflow. You should also consider running it with ``restart`` command when you change the installed version. This will clean-up the database so that you start with a clean DB and not DB installed in a previous version. -So typically you'd run it like ``breeze --install-airflow-version=1.10.9 restart``. +So typically you'd run it like ``breeze --use-airflow-version=1.10.9 restart``. 
Tracking SQL statements ======================= diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml index edee338bba356..bdba5d0f20be3 100644 --- a/airflow/config_templates/config.yml +++ b/airflow/config_templates/config.yml @@ -243,7 +243,7 @@ version_added: ~ type: float example: ~ - default: "30.0" + default: "30" - name: dagbag_import_error_tracebacks description: | Should a traceback be shown in the UI for dagbag import errors, diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index c498d466f41e5..68901018540d2 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -148,7 +148,7 @@ fernet_key = {FERNET_KEY} donot_pickle = True # How long before timing out a python file import -dagbag_import_timeout = 30.0 +dagbag_import_timeout = 30 # Should a traceback be shown in the UI for dagbag import errors, # instead of just the exception message diff --git a/breeze b/breeze index df24b145e4eb1..d30d9c1c43e1b 100755 --- a/breeze +++ b/breeze @@ -163,14 +163,16 @@ function breeze::setup_default_breeze_constants() { # of Airflow is removed and the specified version of Airflow is installed from GitHub export INSTALL_AIRFLOW_REFERENCE="" - # if set to true, the ci image will look for wheel packages in dist folder and will install them - # during entering the container - export INSTALL_PACKAGES_FROM_DIST="false" - # Determines whether to force build without checking if it is needed # Can be overridden by '--force-build-images' flag. export FORCE_BUILD_IMAGES="false" + # Determines whether to reinstall airflow at entering the image. 
+ export USE_AIRFLOW_VERSION="" + # if set to true, the ci image will look for wheel packages in dist folder and will install them + # during entering the container + export USE_PACKAGES_FROM_DIST="false" + # load all the common functions here - those are the functions that are shared between Breeze # and CI scripts. The CI scripts do not use Breeze as driving script - they read all configuration # from the environment variables. That's why we keep all the common initialization in those libs @@ -505,10 +507,10 @@ EOF DockerHub repo: ${DOCKERHUB_REPO} Backend: ${BACKEND} ${backend_version} EOF - if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE=} ]]; then + if [[ -n ${USE_AIRFLOW_VERSION=} ]]; then cat <] '${CMDNAME} \\ --github-image-id 209845560' - pull/use image with RUN_ID +Most flags are applicable to the shell command as it will run build when needed. " readonly DETAILED_USAGE_SHELL export DETAILED_USAGE_EXEC=" @@ -1748,7 +1765,7 @@ ${CMDNAME} build-image [FLAGS] Flags: $(breeze::flag_airflow_variants) -$(breeze::flag_choose_different_airflow_version) +$(breeze::flag_build_different_airflow_version) $(breeze::flag_production_image) $(breeze::flag_build_docker_images) $(breeze::flag_pull_push_docker_images) @@ -1982,6 +1999,7 @@ ${CMDNAME} start-airflow If you want to load default connections and example dags you can use the dedicated flags. Flags: +$(breeze::flag_use_different_airflow_version) $(breeze::flag_start_airflow) " readonly DETAILED_USAGE_START_AIRFLOW @@ -2379,7 +2397,7 @@ function breeze::flag_local_file_mounting() { ####################################################################################################### # -# Prints flags that allow to choose different airflow variants +# Prints flags that allow to build different airflow variants # # Global constants used: # FORMATTED_INSTALL_AIRFLOW_VERSIONS @@ -2388,44 +2406,24 @@ function breeze::flag_local_file_mounting() { # Outputs: # Flag information. 
####################################################################################################### -function breeze::flag_choose_different_airflow_version() { +function breeze::flag_build_different_airflow_version() { echo " -a, --install-airflow-version INSTALL_AIRFLOW_VERSION - In CI image, installs Airflow (in entrypoint) from PIP released version or using - the installation method specified (sdist, wheel, none). - - In PROD image the installation of selected method or version happens during image building. - For PROD image, the 'none' options is not valid. - - One of: + Uses different version of Airflow when building PROD image. ${FORMATTED_INSTALL_AIRFLOW_VERSIONS} - When 'none' is used, you can install airflow from local packages. When building image, - airflow package should be added to 'docker-context-files' and - --install-from-docker-context-files flag should be used. When running an image, airflow - package should be added to dist folder and --install-packages-from-dist flag should be used. - -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE - If specified, installs Airflow directly from reference in GitHub. This happens at - image building time in production image and at container entering time for CI image. + Installs Airflow directly from reference in GitHub when building PROD image. This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1. --installation-method INSTALLATION_METHOD - Method of installing airflow for production image - either from the sources ('.') + Method of installing Airflow in PROD image - either from the sources ('.') or from package 'apache-airflow' to install from PyPI. Default in Breeze is to install from sources. One of: ${FORMATTED_INSTALLATION_METHOD} ---no-rbac-ui - Disables RBAC UI when Airflow 1.10.* is installed. - ---install-packages-from-dist - If specified it will look for packages placed in dist folder and it will install the - packages after installing Airflow.
This is useful for testing provider - packages. - --upgrade-to-newer-dependencies Upgrades PIP packages to latest versions available without looking at the constraints. @@ -2435,6 +2433,41 @@ ${FORMATTED_INSTALLATION_METHOD} " } + +####################################################################################################### +# +# Prints flags that allow to build different airflow variants +# +# Global constants used: +# FORMATTED_INSTALL_AIRFLOW_VERSIONS +# +# +# Outputs: +# Flag information. +####################################################################################################### +function breeze::flag_use_different_airflow_version() { + echo " +--use-airflow-version AIRFLOW_SPECIFICATION + In CI image, installs Airflow at runtime from PIP released version or using + the installation method specified (sdist, wheel, none). When 'none' is used, + airflow is just removed. In this case airflow package should be added to dist folder + and --use-packages-from-dist flag should be used. + +${FORMATTED_USE_AIRFLOW_VERSION} + +--use-packages-from-dist + In CI image, if specified it will look for packages placed in dist folder and + it will install the packages after entering the image. + This is useful for testing provider packages. + +--no-rbac-ui + Disables RBAC UI when Airflow 1.10.* is installed. + + +" +} + + ####################################################################################################### # # Prints flags that allow to choose variants of constraint generation @@ -2605,12 +2638,12 @@ ${FORMATTED_DEFAULT_PROD_EXTRAS} are needed by the extras. When you build image during the development (which is default in Breeze) all providers are installed by default from sources. You can disable it by adding this flag but then you have to install providers from - wheel packages via --install-packages-from-dist flag. + wheel packages via --use-packages-from-dist flag. 
--disable-pypi-when-building Disable installing Airflow from pypi when building. If you use this flag and want to install Airflow, you have to install it from packages placed in - 'docker-context-files' and use --install-from-local-files-when-building flag. + 'docker-context-files' and use --install-from-docker-context-files flag. --additional-extras ADDITIONAL_EXTRAS Additional extras to pass to build images The default is no additional extras. @@ -2665,7 +2698,7 @@ Build options: --disable-pip-cache Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build. ---install-from-local-files-when-building +--install-from-docker-context-files This flag is used during image building. If it is used additionally to installing Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed in the 'docker-context-files' folder. The same flag can be used during entering the image in @@ -2888,8 +2921,12 @@ $(breeze::print_star_line) $(breeze::flag_assume_answers_to_questions) $(breeze::print_star_line) - Choose different Airflow version to install or run -$(breeze::flag_choose_different_airflow_version) + Install different Airflow version during PROD image build +$(breeze::flag_build_different_airflow_version) + +$(breeze::print_star_line) + Use different Airflow version at runtime in CI image +$(breeze::flag_use_different_airflow_version) $(breeze::print_star_line) Credentials diff --git a/breeze-complete b/breeze-complete index 2004a1a9ae185..7fc8f78148493 100644 --- a/breeze-complete +++ b/breeze-complete @@ -61,16 +61,17 @@ _breeze_allowed_install_airflow_versions=$(cat <<-EOF 2.0.0 1.10.15 1.10.14 -1.10.12 -1.10.11 -1.10.10 -1.10.9 -none wheel sdist EOF ) +_breeze_allowed_USE_AIRFLOW_VERSION=$(cat <<-EOF +${_breeze_allowed_install_airflow_versions} +none +EOF +) + _breeze_allowed_static_checks=$(cat <<-EOF all all-but-pylint @@ -179,7 +180,8 @@ additional-extras: additional-python-deps: disable-pypi-when-building 
skip-insta dev-apt-deps: additional-dev-apt-deps: dev-apt-command: additional-dev-apt-command: additional-dev-apt-env: runtime-apt-deps: additional-runtime-apt-deps: runtime-apt-command: additional-runtime-apt-command: additional-runtime-apt-env: load-default-connections load-example-dags -install-packages-from-dist no-rbac-ui package-format: upgrade-to-newer-dependencies installation-method: continue-on-pip-check-failure +use-packages-from-dist no-rbac-ui package-format: upgrade-to-newer-dependencies installation-method: continue-on-pip-check-failure +use-airflow-version: test-type: preserve-volumes dry-run-docker " @@ -261,9 +263,12 @@ function breeze_complete::get_known_values_breeze() { static-check) _breeze_known_values=${_breeze_allowed_static_checks} ;; - -A | --install-airflow-version) + -a | --install-airflow-version) _breeze_known_values=${_breeze_allowed_install_airflow_versions} ;; + --use-airflow-version) + _breeze_known_values=${_breeze_allowed_USE_AIRFLOW_VERSION} + ;; docker-compose) # shellcheck disable=SC2034 if typeset -f "_docker_compose" >/dev/null; then diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index 551019d8d50ed..4f0a96e0af3f2 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -502,13 +502,13 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -./breeze start-airflow --install-airflow-version rc --python 3.7 --backend postgres +./breeze start-airflow --use-airflow-version rc --python 3.7 --backend postgres ``` For 1.10 releases you can also use `--no-rbac-ui` flag disable RBAC UI of Airflow: ```shell script -./breeze start-airflow --install-airflow-version rc --python 3.7 --backend postgres --no-rbac-ui +./breeze start-airflow --use-airflow-version rc --python 3.7 --backend postgres --no-rbac-ui ``` Once you install and run 
Airflow, you should perform any verification you see as necessary to check diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 1c5ba8ab340b4..b13efcffd80e5 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -573,8 +573,8 @@ Here is a typical scenario. First copy all the provider packages .whl files to the `dist` folder. ```shell script -./breeze start-airflow --install-airflow-version rc \ - --python 3.7 --backend postgres --install-packages-from-dist +./breeze start-airflow --use-airflow-version rc \ + --python 3.7 --backend postgres --use-packages-from-dist ``` ### Building your own docker image diff --git a/dev/provider_packages/README.md b/dev/provider_packages/README.md index 7fc3f26db900a..a8da8edd623e4 100644 --- a/dev/provider_packages/README.md +++ b/dev/provider_packages/README.md @@ -455,7 +455,8 @@ This prepares airflow package in the "dist" folder 2. Enter the container: ```shell script -export INSTALL_AIRFLOW_VERSION="wheel" +export USE_AIRFLOW_VERSION="wheel" +export USE_PACKAGES_FROM_DIST="true" ./dev/provider_packages/enter_breeze_provider_package_tests.sh ``` diff --git a/docker-context-files/README.md b/docker-context-files/README.md index 9ec5b0ff23ae3..e2aa1a0071b6b 100644 --- a/docker-context-files/README.md +++ b/docker-context-files/README.md @@ -26,7 +26,7 @@ the [docker-context-files](.) folder to the image context - in case of productio the build segment, co content of the folder is available in the `/docker-context-file` folder inside the build image. You can store constraint files and wheel packages there that you want to install as PYPI packages and refer to those packages using -`--constraint-location` flag for constraints or by using `--install-from-local-files-when-building` flag. +`--constraint-location` flag for constraints or by using `--install-from-docker-context-files` flag. 
By default, the content of this folder is .gitignored so that any binaries and files you put here are only used for local builds and not committed to the repository. diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env index 3d239e0872a1e..95375897a8986 100644 --- a/scripts/ci/docker-compose/_docker.env +++ b/scripts/ci/docker-compose/_docker.env @@ -40,7 +40,8 @@ INIT_SCRIPT_FILE INSTALL_AIRFLOW_VERSION GENERATE_CONSTRAINTS_MODE INSTALL_PROVIDERS_FROM_SOURCES -INSTALL_PACKAGES_FROM_DIST +USE_AIRFLOW_VERSION +USE_PACKAGES_FROM_DIST ISSUE_ID LOAD_DEFAULT_CONNECTIONS LOAD_EXAMPLES diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh index da015b9473cf7..b0dabb35e58ff 100644 --- a/scripts/ci/libraries/_build_images.sh +++ b/scripts/ci/libraries/_build_images.sh @@ -743,7 +743,6 @@ Docker building ${AIRFLOW_CI_IMAGE}. --build-arg AIRFLOW_BRANCH="${BRANCH_NAME}" \ --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \ --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="${AIRFLOW_PRE_CACHED_PIP_PACKAGES}" \ - --build-arg INSTALL_PROVIDERS_FROM_SOURCES="${INSTALL_PROVIDERS_FROM_SOURCES}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \ --build-arg ADDITIONAL_PYTHON_DEPS="${ADDITIONAL_PYTHON_DEPS}" \ --build-arg ADDITIONAL_DEV_APT_COMMAND="${ADDITIONAL_DEV_APT_COMMAND}" \ @@ -752,8 +751,6 @@ Docker building ${AIRFLOW_CI_IMAGE}. 
--build-arg ADDITIONAL_RUNTIME_APT_COMMAND="${ADDITIONAL_RUNTIME_APT_COMMAND}" \ --build-arg ADDITIONAL_RUNTIME_APT_DEPS="${ADDITIONAL_RUNTIME_APT_DEPS}" \ --build-arg ADDITIONAL_RUNTIME_APT_ENV="${ADDITIONAL_RUNTIME_APT_ENV}" \ - --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \ - --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \ --build-arg UPGRADE_TO_NEWER_DEPENDENCIES="${UPGRADE_TO_NEWER_DEPENDENCIES}" \ --build-arg CONTINUE_ON_PIP_CHECK_FAILURE="${CONTINUE_ON_PIP_CHECK_FAILURE}" \ --build-arg CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \ diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index de9a2dd975f1b..b034710041bfe 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -132,7 +132,7 @@ function initialization::initialize_base_variables() { # if set to true, the ci image will look for packages in dist folder and will install them # during entering the container - export INSTALL_PACKAGES_FROM_DIST=${INSTALL_PACKAGES_FROM_DIST:="false"} + export USE_PACKAGES_FROM_DIST=${USE_PACKAGES_FROM_DIST:="false"} # If set the specified file will be used to initialize Airflow after the environment is created, # otherwise it will use files/airflow-breeze-config/init.sh @@ -464,6 +464,13 @@ function initialization::initialize_image_build_variables() { # * 'constraints-source-providers' for constraints with source version of providers (defaults in Breeze and CI) # * 'constraints-no-providers' for constraints without providers export AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS:="constraints-source-providers"}" + + # Replace airflow at runtime in CI image with the one specified + # * none - just removes airflow + # * wheel - replaces airflow with one specified in the wheel file in /dist + # * sdist - replaces airflow with one specified in the sdist file in /dist + # * <version> - replaces airflow with the specific version from PyPI
+ export USE_AIRFLOW_VERSION=${USE_AIRFLOW_VERSION:=""} } # Determine version suffixes used to build provider packages @@ -711,7 +718,8 @@ Initialization variables: INIT_SCRIPT_FILE: '${INIT_SCRIPT_FILE=}' LOAD_DEFAULT_CONNECTIONS: '${LOAD_DEFAULT_CONNECTIONS}' LOAD_EXAMPLES: '${LOAD_EXAMPLES}' - INSTALL_PACKAGES_FROM_DIST: '${INSTALL_PACKAGES_FROM_DIST=}' + USE_AIRFLOW_VERSION: '${USE_AIRFLOW_VERSION=}' + USE_PACKAGES_FROM_DIST: '${USE_PACKAGES_FROM_DIST=}' DISABLE_RBAC: '${DISABLE_RBAC}' Test variables: @@ -791,6 +799,8 @@ function initialization::make_constants_read_only() { readonly INSTALL_AIRFLOW_VERSION readonly INSTALL_AIRFLOW_REFERENCE + readonly USE_AIRFLOW_VERSION + readonly DB_RESET readonly VERBOSE diff --git a/scripts/ci/libraries/_testing.sh b/scripts/ci/libraries/_testing.sh index 638daf5e88c8e..1a050937f564f 100644 --- a/scripts/ci/libraries/_testing.sh +++ b/scripts/ci/libraries/_testing.sh @@ -44,7 +44,7 @@ function testing::get_docker_compose_local() { DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml") fi - if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE} ]]; then + if [[ -n ${USE_AIRFLOW_VERSION=} ]]; then DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml") fi readonly DOCKER_COMPOSE_LOCAL diff --git a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh index 0bf415f012c13..012493d569951 100755 --- a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh +++ b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh @@ -43,9 +43,10 @@ function prepare_tests() { DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml") fi - if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE} ]]; then + if [[ -n ${USE_AIRFLOW_VERSION=} ]]; then DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml") fi + readonly 
DOCKER_COMPOSE_LOCAL if [[ ${TEST_TYPE:=} == "Integration" ]]; then diff --git a/scripts/in_container/bin/run_tmux b/scripts/in_container/bin/run_tmux index 08ed70da6756f..e93988af3f8f4 100755 --- a/scripts/in_container/bin/run_tmux +++ b/scripts/in_container/bin/run_tmux @@ -51,9 +51,11 @@ tmux split-window -h tmux select-pane -t 2 tmux send-keys 'airflow webserver' C-m -tmux select-pane -t 0 -tmux split-window -h -tmux send-keys 'cd /opt/airflow/airflow/www/; yarn dev' C-m +if [[ -z "${USE_AIRFLOW_VERSION=}" ]]; then + tmux select-pane -t 0 + tmux split-window -h + tmux send-keys 'cd /opt/airflow/airflow/www/; yarn dev' C-m +fi # Attach Session, on the Main window tmux select-pane -t 0 diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh index 16aabbb07fee2..a7b5788e7226a 100755 --- a/scripts/in_container/entrypoint_ci.sh +++ b/scripts/in_container/entrypoint_ci.sh @@ -56,40 +56,42 @@ echo RUN_TESTS=${RUN_TESTS:="false"} CI=${CI:="false"} -INSTALL_AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION:=""}" +USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" -if [[ ${AIRFLOW_VERSION} == *1.10* || ${INSTALL_AIRFLOW_VERSION} == *1.10* ]]; then +if [[ ${AIRFLOW_VERSION} == *1.10* || ${USE_AIRFLOW_VERSION} == *1.10* ]]; then export RUN_AIRFLOW_1_10="true" else export RUN_AIRFLOW_1_10="false" fi -if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then +if [[ -z ${USE_AIRFLOW_VERSION=} ]]; then export PYTHONPATH=${AIRFLOW_SOURCES} echo echo "Using already installed airflow version" echo - pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null - ./ask_for_recompile_assets_if_needed.sh - popd >/dev/null + if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then + pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null + ./ask_for_recompile_assets_if_needed.sh + popd >/dev/null + fi # Cleanup the logs, tmp when entering the environment sudo rm -rf "${AIRFLOW_SOURCES}"/logs/* sudo rm -rf "${AIRFLOW_SOURCES}"/tmp/* mkdir -p "${AIRFLOW_SOURCES}"/logs/ mkdir -p 
"${AIRFLOW_SOURCES}"/tmp/ -elif [[ ${INSTALL_AIRFLOW_VERSION} == "none" ]]; then +elif [[ ${USE_AIRFLOW_VERSION} == "none" ]]; then echo echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally" echo uninstall_airflow_and_providers -elif [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" ]]; then +elif [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then echo echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers." echo uninstall_airflow_and_providers install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]" uninstall_providers -elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist" ]]; then +elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then echo echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers." echo @@ -100,12 +102,12 @@ else echo echo "Install airflow from PyPI without extras" echo - install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" + install_released_airflow_version "${USE_AIRFLOW_VERSION}" fi -if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then +if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then echo echo "Install all packages from dist folder" - if [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" ]]; then + if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then echo "(except apache-airflow)" fi if [[ ${PACKAGE_FORMAT} == "both" ]]; then @@ -118,7 +120,7 @@ if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then installable_files=() for file in /dist/*.{whl,tar.gz} do - if [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" && ${file} == "apache?airflow-[0-9]"* ]]; then + if [[ ${USE_AIRFLOW_VERSION} == "wheel" && ${file} == "apache?airflow-[0-9]"* ]]; then # Skip Apache Airflow package - it's just been installed above with extras echo "Skipping ${file}" continue diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh index 7326adfbeb2bc..64dc09c82abeb 100755 --- 
a/scripts/in_container/run_install_and_test_provider_packages.sh +++ b/scripts/in_container/run_install_and_test_provider_packages.sh @@ -24,7 +24,7 @@ function verify_parameters(){ echo "Testing if all classes in import packages can be imported" echo - if [[ -z "${INSTALL_AIRFLOW_VERSION=""}" ]]; then + if [[ -z "${USE_AIRFLOW_VERSION=""}" ]]; then echo echo "${COLOR_RED}ERROR: You have to specify airflow version to install.${COLOR_RESET}" echo @@ -46,19 +46,19 @@ function verify_parameters(){ function install_airflow_as_specified() { group_start "Install Airflow as specified" - if [[ ${INSTALL_AIRFLOW_VERSION} == "none" ]]; then + if [[ ${USE_AIRFLOW_VERSION} == "none" ]]; then echo echo "Skip installing airflow - only install wheel packages that are present locally" echo uninstall_airflow_and_providers - elif [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" ]]; then + elif [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then echo echo "Install airflow from wheel including [${AIRFLOW_EXTRAS}] extras" echo uninstall_airflow_and_providers install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]" uninstall_providers - elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist" ]]; then + elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then echo echo "Install airflow from sdist including [${AIRFLOW_EXTRAS}] extras" echo @@ -69,7 +69,7 @@ function install_airflow_as_specified() { echo echo "Install airflow from PyPI without extras" echo - install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" + install_released_airflow_version "${USE_AIRFLOW_VERSION}" uninstall_providers fi group_end