From 5609246d126b8770371b1b1e30422c4cb91506d8 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Tue, 16 Jan 2024 19:27:17 +0530 Subject: [PATCH] Testing --- .github/workflows/ci-github-actions.yml | 152 +- .github/workflows/ci-model-regression.yml | 1846 +++++++------- .github/workflows/continous-integration.yml | 2434 +++++++++---------- .github/workflows/documentation.yml | 450 ++-- 4 files changed, 2441 insertions(+), 2441 deletions(-) diff --git a/.github/workflows/ci-github-actions.yml b/.github/workflows/ci-github-actions.yml index 4f354bc1aa43..6868128cca80 100644 --- a/.github/workflows/ci-github-actions.yml +++ b/.github/workflows/ci-github-actions.yml @@ -1,76 +1,76 @@ -name: CI Github Actions - -on: - push: - branches: - - main - tags: - - "*" - pull_request: - -env: - DEFAULT_PYTHON_VERSION: "3.10" - -jobs: - test: - name: Run Tests - runs-on: ubuntu-22.04 - #missing matrix - strategy: - fail-fast: false - - steps: - - name: Checkout git repository πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Download gomplate - run: |- - sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 - sudo chmod +x /usr/local/bin/gomplate - - - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - - - name: Read Poetry Version πŸ”’ - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Load Poetry Cached Libraries ⬇ - id: cache-poetry - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - with: - path: .venv - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} - restore-keys: ${{ runner.os }}-poetry-${{ env.DEFAULT_PYTHON_VERSION }} - - - name: Clear Poetry cache - if: steps.cache-poetry.outputs.cache-hit == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') - run: rm -r .venv - - - name: Create virtual environment - if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) - run: python -m venv create .venv - - - name: Set up virtual environment - run: poetry config virtualenvs.in-project true - - - name: Install Dependencies πŸ“¦ - run: | - make install-full - - - name: Lint Code 🎎 - run: | - poetry run ruff check .github --extend-ignore D - poetry run black --check .github - - - name: Test Code πŸ” - run: | - make test-gh-actions +#name: CI Github Actions +# +#on: +# push: +# branches: +# - main +# tags: +# - "*" +# pull_request: +# +#env: +# DEFAULT_PYTHON_VERSION: "3.10" +# +#jobs: +# test: +# name: Run Tests +# runs-on: ubuntu-22.04 +# #missing matrix +# strategy: +# fail-fast: false +# +# steps: +# - name: Checkout git repository πŸ• +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Download gomplate +# run: |- +# sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 +# sudo chmod +x /usr/local/bin/gomplate +# +# - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 +# uses: 
actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: ${{ env.DEFAULT_PYTHON_VERSION }} +# +# - name: Read Poetry Version πŸ”’ +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Load Poetry Cached Libraries ⬇ +# id: cache-poetry +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# with: +# path: .venv +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} +# restore-keys: ${{ runner.os }}-poetry-${{ env.DEFAULT_PYTHON_VERSION }} +# +# - name: Clear Poetry cache +# if: steps.cache-poetry.outputs.cache-hit == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') +# run: rm -r .venv +# +# - name: Create virtual environment +# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) +# run: python -m venv create .venv +# +# - name: Set up virtual environment +# run: poetry config virtualenvs.in-project true +# +# - name: Install Dependencies πŸ“¦ +# run: | +# make install-full +# +# - name: Lint Code 🎎 +# run: | +# poetry run ruff check .github --extend-ignore D +# poetry run black --check .github +# +# - name: Test Code πŸ” +# run: | +# make test-gh-actions diff --git a/.github/workflows/ci-model-regression.yml b/.github/workflows/ci-model-regression.yml index 84f6d34a5392..7936b9d70a3e 100644 --- a/.github/workflows/ci-model-regression.yml +++ b/.github/workflows/ci-model-regression.yml @@ -1,923 +1,923 @@ -# The docs: -# - https://www.notion.so/rasa/The-CI-for-model-regression-tests-aa579d5524a544af992f97d132bcc2de -# - https://www.notion.so/rasa/Datadog-Usage-Documentation-422099c9a3a24f5a99d92d904537dd0b -name: CI - Model Regression - -on: - push: - branches: - - "[0-9]+.[0-9]+.x" - tags: - - "**" - pull_request: - types: [opened, synchronize, labeled] - -concurrency: - group: ci-model-regression-${{ github.ref }} # branch or tag name - cancel-in-progress: true - -env: - GKE_ZONE: us-central1 - GCLOUD_VERSION: "318.0.0" - DD_PROFILING_ENABLED: false - TF_FORCE_GPU_ALLOW_GROWTH: true - NVML_INTERVAL_IN_SEC: 1 - -jobs: - read_test_configuration: - name: Reads tests configuration - if: ${{ github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') }} - runs-on: ubuntu-22.04 - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - matrix_length: ${{ steps.set-matrix.outputs.matrix_length }} - configuration_id: ${{ steps.fc_config.outputs.comment-id }} - dataset_branch: ${{ steps.set-dataset-branch.outputs.dataset_branch }} - - steps: - - name: Checkout main - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Download gomplate - run: |- - sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 - sudo chmod +x /usr/local/bin/gomplate - - - name: Find a comment with configuration - uses: tczekajlo/find-comment@16228d0f2100e06ea9bf8c0e7fe7287b7c6b531d - id: fc_config - with: - token: ${{ secrets.GITHUB_TOKEN }} - issue-number: ${{ github.event.number }} - body-includes: "^/modeltest" - - - run: echo ${{ 
steps.fc_config.outputs.comment-id }} - - # This step has to happen before the other configuration details are read from - # the same PR comment, because we need to check out the correct branch to feed the - # dataset mapping and configs into the 'Read configuration from a PR comment' step - # which creates the experiments matrix - - name: Read dataset branch from a PR comment - if: steps.fc_config.outputs.comment-id != '' - id: set-dataset-branch - run: |- - source <(gomplate -d github=https://api.github.com/repos/${{ github.repository }}/issues/comments/${{ steps.fc_config.outputs.comment-id }} -H 'github=Authorization:token ${{ secrets.GITHUB_TOKEN }}' -f .github/templates/model_regression_test_read_dataset_branch.tmpl) - echo "dataset_branch=${DATASET_BRANCH}" >> $GITHUB_OUTPUT - - - name: Checkout dataset - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - with: - repository: ${{ secrets.DATASET_REPOSITORY }} - token: ${{ secrets.ML_TEST_SA_PAT }} - path: "dataset" - ref: ${{ steps.set-dataset-branch.outputs.dataset_branch }} - - - name: Render help description from template - id: get_help_description - run: | - OUTPUT=$(gomplate -d mapping=./dataset/dataset_config_mapping.json -f .github/templates/model_regression_test_config_comment.tmpl) - OUTPUT="${OUTPUT//$'\n'/'%0A'}" - OUTPUT="${OUTPUT//$'\r'/'%0D'}" - echo "help_description=$OUTPUT" >> $GITHUB_OUTPUT - - - name: Create a comment with help description - uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486 - with: - mode: "delete-previous" - id: comment_help_description - github-token: ${{ secrets.GITHUB_TOKEN }} - body: | - ${{ steps.get_help_description.outputs.help_description }} - - - if: steps.fc_config.outputs.comment-id == '' - run: echo "::error::Cannot find a comment with the configuration" - name: Log a warning message if a configuration cannot be found - - - name: Read configuration from a PR comment - if: steps.fc_config.outputs.comment-id != '' - id: set-matrix - run: |- - matrix=$(gomplate -d mapping=./dataset/dataset_config_mapping.json -d github=https://api.github.com/repos/${{ github.repository }}/issues/comments/${{ steps.fc_config.outputs.comment-id }} -H 'github=Authorization:token ${{ secrets.GITHUB_TOKEN }}' -f .github/templates/model_regression_test_config_to_json.tmpl) - - if [ $? -ne 0 ]; then - echo "::error::Cannot read config from PR. Please double check your config." 
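-            # "::error::" emits a GitHub annotation on the workflow run; the
-            # non-zero exit below then fails this job, so no matrix output
-            # reaches the downstream test jobs.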
- exit 1 - fi - - matrix_length=$(echo $matrix | jq '.[] | length') - echo "matrix_length=$matrix_length" >> $GITHUB_OUTPUT - echo "matrix=$matrix" >> $GITHUB_OUTPUT - - - name: Update the comment with the configuration - uses: peter-evans/create-or-update-comment@3383acd359705b10cb1eeef05c0e88c056ea4666 - if: steps.fc_config.outputs.comment-id != '' - with: - comment-id: ${{ steps.fc_config.outputs.comment-id }} - body: | - - reactions: eyes - - - name: Re-create the comment with the configuration - uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486 - if: steps.fc_config.outputs.comment-id != '' && steps.fc_config.outputs.comment-body != '' - with: - mode: "delete-previous" - id: comment_configuration - github-token: ${{ secrets.GITHUB_TOKEN }} - body: ${{ steps.fc_config.outputs.comment-body }} - - - name: Find a comment with configuration - update - uses: tczekajlo/find-comment@16228d0f2100e06ea9bf8c0e7fe7287b7c6b531d - id: fc_config_update - with: - token: ${{ secrets.GITHUB_TOKEN }} - issue-number: ${{ github.event.number }} - body-includes: "^/modeltest" - - - name: Add reaction - uses: peter-evans/create-or-update-comment@3383acd359705b10cb1eeef05c0e88c056ea4666 - if: steps.fc_config_update.outputs.comment-id != '' - with: - edit-mode: "replace" - comment-id: ${{ steps.fc_config_update.outputs.comment-id }} - reactions: heart, hooray, rocket - - - name: Add a comment that the tests are in progress - uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486 - if: steps.fc_config_update.outputs.comment-id != '' - with: - mode: "delete-previous" - id: comment_tests_in_progress - github-token: ${{ secrets.GITHUB_TOKEN }} - body: | - The model regression tests have started. It might take a while, please be patient. - As soon as results are ready you'll see a new comment with the results. - - Used configuration can be found in [the comment.](https://github.com/${{ github.repository }}/pull/${{ github.event.number}}#issuecomment-${{ steps.fc_config_update.outputs.comment-id }}) - - deploy_runner_gpu: - name: Deploy Github Runner - GPU - needs: read_test_configuration - runs-on: ubuntu-22.04 - if: ${{ contains(github.event.pull_request.labels.*.name, 'runner:gpu') && github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') && needs.read_test_configuration.outputs.configuration_id != '' }} - - steps: - - name: Checkout - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Download gomplate - run: |- - sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 - sudo chmod +x /usr/local/bin/gomplate - - - name: Get TensorFlow version - run: |- - # Read TF version from poetry.lock file - pip install toml - TF_VERSION=$(scripts/read_tensorflow_version.sh) - # Keep the first 3 characters, e.g. 
we keep 2.3 if TF_VERSION is 2.3.4 - TF_VERSION=${TF_VERSION::3} - echo "TensorFlow version: $TF_VERSION" - echo TF_VERSION=$TF_VERSION >> $GITHUB_ENV - - # Use compatible CUDA/cuDNN with the given TF version - - name: Prepare GitHub runner image tag - run: |- - GH_RUNNER_IMAGE_TAG=$(jq -r 'if (.config | any(.TF == "${{ env.TF_VERSION }}" )) then (.config[] | select(.TF == "${{ env.TF_VERSION }}") | .IMAGE_TAG) else .default_image_tag end' .github/configs/tf-cuda.json) - echo "GitHub runner image tag for TensorFlow ${{ env.TF_VERSION }} is ${GH_RUNNER_IMAGE_TAG}" - echo GH_RUNNER_IMAGE_TAG=$GH_RUNNER_IMAGE_TAG >> $GITHUB_ENV - - num_max_replicas=3 - matrix_length=${{ needs.read_test_configuration.outputs.matrix_length }} - if [[ $matrix_length -gt $num_max_replicas ]]; then - NUM_REPLICAS=$num_max_replicas - else - NUM_REPLICAS=$matrix_length - fi - echo NUM_REPLICAS=$NUM_REPLICAS >> $GITHUB_ENV - - - name: Send warning if the current TF version does not have CUDA image tags configured - if: env.GH_RUNNER_IMAGE_TAG == 'latest' - env: - TF_CUDA_FILE: ./github/config/tf-cuda.json - run: |- - echo "::warning file=${TF_CUDA_FILE},line=3,col=1,endColumn=3::Missing cuda config for tf ${{ env.TF_VERSION }}. If you are not sure how to config CUDA, please reach out to infrastructure." - - - name: Notify slack on tf-cuda config updates - if: env.GH_RUNNER_IMAGE_TAG == 'latest' - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - uses: voxmedia/github-action-slack-notify-build@3665186a8c1a022b28a1dbe0954e73aa9081ea9e - with: - channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }} - status: WARNING - color: warning - - - name: Render deployment template - run: |- - export GH_RUNNER_IMAGE_TAG=${{ env.GH_RUNNER_IMAGE_TAG }} - export GH_RUNNER_IMAGE=${{ secrets.GH_RUNNER_IMAGE }} - gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml - - # Setup gcloud auth - - uses: google-github-actions/auth@e8df18b60c5dd38ba618c121b779307266153fbf - with: - service_account: ${{ secrets.GKE_RASA_CI_GPU_SA_NAME_RASA_CI_CD }} - credentials_json: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} - - # Get the GKE credentials for the cluster - - name: Get GKE Cluster Credentials - uses: google-github-actions/get-gke-credentials@894c221960ab1bc16a69902f29f090638cca753f - with: - cluster_name: ${{ secrets.GKE_GPU_CLUSTER_RASA_CI_CD }} - location: ${{ env.GKE_ZONE }} - project_id: ${{ secrets.GKE_SA_RASA_CI_GPU_PROJECT_RASA_CI_CD }} - - - name: Deploy Github Runner - run: |- - kubectl apply -f runner_deployment.yaml - kubectl -n github-runner rollout status --timeout=15m deployment/github-runner-$GITHUB_RUN_ID - - model_regression_test_gpu: - name: Model Regression Tests - GPU - needs: - - deploy_runner_gpu - - read_test_configuration - env: - # Determine where CUDA and Nvidia libraries are located. TensorFlow looks for libraries in the given paths - LD_LIBRARY_PATH: "/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/lib64:/usr/local/nvidia/lib:/usr/local/nvidia/lib64" - ACCELERATOR_TYPE: "GPU" - runs-on: [self-hosted, gpu, "${{ github.run_id }}"] - strategy: - # max-parallel: By default, GitHub will maximize the number of jobs run in parallel depending on the available runners on GitHub-hosted virtual machines. 
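-      # The matrix object is whatever JSON the read_test_configuration job
-      # emitted; one job runs per combination, e.g. (values hypothetical):
-      # {"dataset": ["Hermit"], "config": ["BERT + DIET(seq)"], "type": ["nlu"], "index_repetition": [0]}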
- matrix: ${{fromJson(needs.read_test_configuration.outputs.matrix)}} - fail-fast: false - if: ${{ contains(github.event.pull_request.labels.*.name, 'runner:gpu') && github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') && needs.read_test_configuration.outputs.configuration_id != '' }} - - steps: - - name: Checkout - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Checkout dataset - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - with: - repository: ${{ secrets.DATASET_REPOSITORY }} - token: ${{ secrets.ML_TEST_SA_PAT }} - path: "dataset" - ref: ${{ needs.read_test_configuration.outputs.dataset_branch }} - - - name: Set env variables - id: set_dataset_config_vars - env: - DATASET_NAME: "${{ matrix.dataset }}" - CONFIG_NAME: "${{ matrix.config }}" - run: |- - # determine extra environment variables - # - CONFIG - # - DATASET - # - IS_EXTERNAL - # - EXTERNAL_DATASET_REPOSITORY_BRANCH - # - TRAIN_DIR - # - TEST_DIR - # - DOMAIN_FILE - source <(gomplate -d mapping=./dataset/dataset_config_mapping.json -f .github/templates/configuration_variables.tmpl) - - # Not all configurations are available for all datasets. - # The job will fail and the workflow continues, if the configuration file doesn't exist - # for a given dataset - - echo "is_dataset_exists=true" >> $GITHUB_OUTPUT - echo "is_config_exists=true" >> $GITHUB_OUTPUT - echo "is_external=${IS_EXTERNAL}" >> $GITHUB_OUTPUT - - # Warn about job if dataset is Hermit and config is BERT + DIET(seq) + ResponseSelector(t2t) or Sparse + BERT + DIET(seq) + ResponseSelector(t2t) - if [[ "${{ matrix.dataset }}" == "Hermit" && "${{ matrix.config }}" =~ "BERT + DIET(seq) + ResponseSelector(t2t)" ]]; then - echo "::warning::This ${{ matrix.dataset }} dataset / ${{ matrix.config }} config is currently being skipped on scheduled tests due to OOM associated with the upgrade to TF 2.6. You may see OOM here." - fi - - if [[ "${IS_EXTERNAL}" == "true" ]]; then - echo "DATASET_DIR=dataset_external" >> $GITHUB_ENV - else - echo "DATASET_DIR=dataset" >> $GITHUB_ENV - test -d dataset/$DATASET || (echo "::warning::The ${{ matrix.dataset }} dataset doesn't exist. Skipping the job." \ - && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0) - fi - - # Skip job if a given type is not available for a given dataset - if [[ -z "${DOMAIN_FILE}" && "${{ matrix.type }}" == "core" ]]; then - echo "::warning::The ${{ matrix.dataset }} dataset doesn't include core type. Skipping the job." \ - && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0 - fi - - test -f dataset/configs/$CONFIG || (echo "::warning::The ${{ matrix.config }} configuration file doesn't exist. Skipping the job." 
\ - && echo "is_dataset_exists=false" >> $GITHUB_OUTPUT && exit 0) - - echo "DATASET=${DATASET}" >> $GITHUB_ENV - echo "CONFIG=${CONFIG}" >> $GITHUB_ENV - echo "DOMAIN_FILE=${DOMAIN_FILE}" >> $GITHUB_ENV - echo "EXTERNAL_DATASET_REPOSITORY_BRANCH=${EXTERNAL_DATASET_REPOSITORY_BRANCH}" >> $GITHUB_ENV - echo "IS_EXTERNAL=${IS_EXTERNAL}" >> $GITHUB_ENV - - if [[ -z "${TRAIN_DIR}" ]]; then - echo "TRAIN_DIR=train" >> $GITHUB_ENV - else - echo "TRAIN_DIR=${TRAIN_DIR}" >> $GITHUB_ENV - fi - - if [[ -z "${TEST_DIR}" ]]; then - echo "TEST_DIR=test" >> $GITHUB_ENV - else - echo "TEST_DIR=${TEST_DIR}" >> $GITHUB_ENV - fi - - HOST_NAME=`hostname` - echo "HOST_NAME=${HOST_NAME}" >> $GITHUB_ENV - - - name: Checkout dataset - external - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - if: steps.set_dataset_config_vars.outputs.is_external == 'true' - with: - repository: ${{ env.DATASET }} - token: ${{ secrets.ML_TEST_SA_PAT }} - path: "dataset_external" - ref: ${{ env.EXTERNAL_DATASET_REPOSITORY_BRANCH }} - - - name: Set dataset commit - id: set-dataset-commit - working-directory: ${{ env.DATASET_DIR }} - run: | - DATASET_COMMIT=$(git rev-parse HEAD) - echo $DATASET_COMMIT - echo "dataset_commit=$DATASET_COMMIT" >> $GITHUB_OUTPUT - - - name: Start Datadog Agent - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - env: - DATASET_NAME: "${{ matrix.dataset }}" - CONFIG: "${{ matrix.config }}" - DATASET_COMMIT: "${{ steps.set-dataset-commit.outputs.dataset_commit }}" - BRANCH: ${{ github.head_ref }} - GITHUB_SHA: "${{ github.sha }}" - PR_ID: "${{ github.event.number }}" - TYPE: "${{ matrix.type }}" - DATASET_REPOSITORY_BRANCH: ${{ needs.read_test_configuration.outputs.dataset_branch }} - INDEX_REPETITION: "${{ matrix.index_repetition }}" - run: | - export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" - .github/scripts/start_dd_agent.sh "${{ secrets.DD_API_KEY }}" "${{ env.ACCELERATOR_TYPE }}" ${{ env.NVML_INTERVAL_IN_SEC }} - - - name: Set up Python 3.10 🐍 - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - python-version: '3.10' - - - name: Read Poetry Version πŸ”’ - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Load Poetry Cached Libraries ⬇ - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} - - - name: Install Dependencies πŸ“¦ - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: | - make install-full - 
poetry run python -m spacy download de_core_news_md - - - name: Install datadog dependencies - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: poetry run pip install -U datadog-api-client ddtrace - - - name: Validate that GPUs are working - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: |- - poetry run python .github/scripts/validate_gpus.py - - - name: Download pretrained models πŸ’ͺ - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: |- - poetry run python .github/scripts/download_pretrained.py --config dataset/configs/${CONFIG} - - - name: Run test - id: run_test - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - env: - TFHUB_CACHE_DIR: ~/.tfhub_cache/ - OMP_NUM_THREADS: 1 - run: |- - poetry run rasa --version - - export NOW_TRAIN=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); - cd ${{ github.workspace }} - - if [[ "${{ steps.set_dataset_config_vars.outputs.is_external }}" == "true" ]]; then - export DATASET=. - fi - - if [[ "${{ matrix.type }}" == "nlu" ]]; then - poetry run ddtrace-run rasa train nlu --quiet -u ${DATASET_DIR}/${DATASET}/${TRAIN_DIR} -c dataset/configs/${CONFIG} --out ${DATASET_DIR}/models/${DATASET}/${CONFIG} - echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - - export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); - poetry run ddtrace-run rasa test nlu --quiet -u ${DATASET_DIR}/$DATASET/${TEST_DIR} -m ${DATASET_DIR}/models/$DATASET/$CONFIG --out ${{ github.workspace }}/results/$DATASET/$CONFIG - - echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - - elif [[ "${{ matrix.type }}" == "core" ]]; then - poetry run ddtrace-run rasa train core --quiet -s ${DATASET_DIR}/$DATASET/$TRAIN_DIR -c dataset/configs/$CONFIG -d ${DATASET_DIR}/${DATASET}/${DOMAIN_FILE} - echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - - export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); - poetry run ddtrace-run rasa test core -s ${DATASET_DIR}/${DATASET}/${TEST_DIR} --out ${{ github.workspace }}/results/${{ matrix.dataset }}/${CONFIG} - - echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - fi - - - name: Generate a JSON file with a report / Publish results to Datadog - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - env: - SUMMARY_FILE: "./report.json" - DATASET_NAME: ${{ matrix.dataset }} - RESULT_DIR: "${{ github.workspace }}/results" - CONFIG: ${{ 
matrix.config }} - TEST_RUN_TIME: ${{ steps.run_test.outputs.test_run_time }} - TRAIN_RUN_TIME: ${{ steps.run_test.outputs.train_run_time }} - TOTAL_RUN_TIME: ${{ steps.run_test.outputs.total_run_time }} - DATASET_REPOSITORY_BRANCH: ${{ needs.read_test_configuration.outputs.dataset_branch }} - TYPE: ${{ matrix.type }} - INDEX_REPETITION: ${{ matrix.index_repetition }} - DATASET_COMMIT: ${{ steps.set-dataset-commit.outputs.dataset_commit }} - BRANCH: ${{ github.head_ref }} - GITHUB_SHA: "${{ github.sha }}" - PR_ID: "${{ github.event.number }}" - DD_APP_KEY: ${{ secrets.DD_APP_KEY_PERF_TEST }} - DD_API_KEY: ${{ secrets.DD_API_KEY }} - DD_SITE: datadoghq.eu - run: |- - export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" - poetry run pip install analytics-python - poetry run python .github/scripts/mr_publish_results.py - cat $SUMMARY_FILE - - - name: Upload an artifact with the report - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - name: report-${{ matrix.dataset }}-${{ matrix.config }}-${{ matrix.index_repetition }} - path: report.json - - - name: Stop Datadog Agent - if: ${{ always() && steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' }} - run: | - sudo service datadog-agent stop - - model_regression_test_cpu: - name: Model Regression Tests - CPU - needs: - - read_test_configuration - env: - ACCELERATOR_TYPE: "CPU" - runs-on: ubuntu-22.04 - strategy: - max-parallel: 3 - matrix: ${{fromJson(needs.read_test_configuration.outputs.matrix)}} - fail-fast: false - if: ${{ !contains(github.event.pull_request.labels.*.name, 'runner:gpu') && github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') && needs.read_test_configuration.outputs.configuration_id != '' }} - - steps: - - name: Checkout - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Checkout dataset - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - with: - repository: ${{ secrets.DATASET_REPOSITORY }} - token: ${{ secrets.ML_TEST_SA_PAT }} - path: "dataset" - ref: ${{ needs.read_test_configuration.outputs.dataset_branch }} - - - name: Download gomplate - run: |- - sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 - sudo chmod +x /usr/local/bin/gomplate - - - name: Set env variables - id: set_dataset_config_vars - env: - DATASET_NAME: "${{ matrix.dataset }}" - CONFIG_NAME: "${{ matrix.config }}" - run: |- - # determine extra environment variables - # - CONFIG - # - DATASET - # - IS_EXTERNAL - # - EXTERNAL_DATASET_REPOSITORY_BRANCH - # - TRAIN_DIR - # - TEST_DIR - # - DOMAIN_FILE - source <(gomplate -d mapping=./dataset/dataset_config_mapping.json -f .github/templates/configuration_variables.tmpl) - - # Not all configurations are available for all datasets. 
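-            # ("Available" is enforced below: is_dataset_exists and
-            # is_config_exists are written to $GITHUB_OUTPUT, and every later
-            # step gates on those values in its `if:` expression.)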
- # The job will fail and the workflow continues, if the configuration file doesn't exist - # for a given dataset - - echo "is_dataset_exists=true" >> $GITHUB_OUTPUT - echo "is_config_exists=true" >> $GITHUB_OUTPUT - echo "is_external=${IS_EXTERNAL}" >> $GITHUB_OUTPUT - - if [[ "${IS_EXTERNAL}" == "true" ]]; then - echo "DATASET_DIR=dataset_external" >> $GITHUB_ENV - else - echo "DATASET_DIR=dataset" >> $GITHUB_ENV - test -d dataset/$DATASET || (echo "::warning::The ${{ matrix.dataset }} dataset doesn't exist. Skipping the job." \ - && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0) - fi - - # Skip job if a given type is not available for a given dataset - if [[ -z "${DOMAIN_FILE}" && "${{ matrix.type }}" == "core" ]]; then - echo "::warning::The ${{ matrix.dataset }} dataset doesn't include core type. Skipping the job." \ - && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0 - fi - - test -f dataset/configs/$CONFIG || (echo "::warning::The ${{ matrix.config }} configuration file doesn't exist. Skipping the job." \ - && echo "is_dataset_exists=false" >> $GITHUB_OUTPUT && exit 0) - - echo "DATASET=${DATASET}" >> $GITHUB_ENV - echo "CONFIG=${CONFIG}" >> $GITHUB_ENV - echo "DOMAIN_FILE=${DOMAIN_FILE}" >> $GITHUB_ENV - echo "EXTERNAL_DATASET_REPOSITORY_BRANCH=${EXTERNAL_DATASET_REPOSITORY_BRANCH}" >> $GITHUB_ENV - echo "IS_EXTERNAL=${IS_EXTERNAL}" >> $GITHUB_ENV - - if [[ -z "${TRAIN_DIR}" ]]; then - echo "TRAIN_DIR=train" >> $GITHUB_ENV - else - echo "TRAIN_DIR=${TRAIN_DIR}" >> $GITHUB_ENV - fi - - if [[ -z "${TEST_DIR}" ]]; then - echo "TEST_DIR=test" >> $GITHUB_ENV - else - echo "TEST_DIR=${TEST_DIR}" >> $GITHUB_ENV - fi - - HOST_NAME=`hostname` - echo "HOST_NAME=${HOST_NAME}" >> $GITHUB_ENV - - - name: Checkout dataset - external - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - if: steps.set_dataset_config_vars.outputs.is_external == 'true' - with: - repository: ${{ env.DATASET }} - token: ${{ secrets.ML_TEST_SA_PAT }} - path: "dataset_external" - ref: ${{ env.EXTERNAL_DATASET_REPOSITORY_BRANCH }} - - - name: Set dataset commit - id: set-dataset-commit - working-directory: ${{ env.DATASET_DIR }} - run: | - DATASET_COMMIT=$(git rev-parse HEAD) - echo $DATASET_COMMIT - echo "dataset_commit=$DATASET_COMMIT" >> $GITHUB_OUTPUT - - - name: Start Datadog Agent - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - env: - DATASET_NAME: "${{ matrix.dataset }}" - CONFIG: "${{ matrix.config }}" - DATASET_COMMIT: "${{ steps.set-dataset-commit.outputs.dataset_commit }}" - BRANCH: ${{ github.head_ref }} - GITHUB_SHA: "${{ github.sha }}" - PR_ID: "${{ github.event.number }}" - TYPE: "${{ matrix.type }}" - DATASET_REPOSITORY_BRANCH: ${{ matrix.dataset_branch }} - INDEX_REPETITION: "${{ matrix.index_repetition }}" - run: | - export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" - .github/scripts/start_dd_agent.sh "${{ secrets.DD_API_KEY }}" "${{ env.ACCELERATOR_TYPE }}" ${{ env.NVML_INTERVAL_IN_SEC }} - - - name: Set up Python 3.10 🐍 - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - python-version: '3.10' - - - name: Read Poetry Version πŸ”’ - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - 
run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Load Poetry Cached Libraries ⬇ - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} - - - name: Install Dependencies πŸ“¦ - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: | - make install-full - poetry run python -m spacy download de_core_news_md - - - name: Install datadog dependencies - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: poetry run pip install -U datadog-api-client ddtrace - - - name: CPU run - Validate that no GPUs are available - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: |- - poetry run python .github/scripts/validate_cpu.py - - - name: Download pretrained models πŸ’ͺ - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - run: |- - poetry run python .github/scripts/download_pretrained.py --config dataset/configs/${CONFIG} - - - name: Run test - id: run_test - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - env: - TFHUB_CACHE_DIR: ~/.tfhub_cache/ - OMP_NUM_THREADS: 1 - run: |- - poetry run rasa --version - - export NOW_TRAIN=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); - cd ${{ github.workspace }} - - if [[ "${{ steps.set_dataset_config_vars.outputs.is_external }}" == "true" ]]; then - export DATASET=. 
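-            # External datasets are checked out directly into $DATASET_DIR,
-            # so the dataset path inside it collapses to ".".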
- fi - - if [[ "${{ matrix.type }}" == "nlu" ]]; then - poetry run ddtrace-run rasa train nlu --quiet -u ${DATASET_DIR}/${DATASET}/${TRAIN_DIR} -c dataset/configs/${CONFIG} --out ${DATASET_DIR}/models/${DATASET}/${CONFIG} - echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - - export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); - poetry run ddtrace-run rasa test nlu --quiet -u ${DATASET_DIR}/$DATASET/${TEST_DIR} -m ${DATASET_DIR}/models/$DATASET/$CONFIG --out ${{ github.workspace }}/results/$DATASET/$CONFIG - - echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - - elif [[ "${{ matrix.type }}" == "core" ]]; then - poetry run ddtrace-run rasa train core --quiet -s ${DATASET_DIR}/$DATASET/$TRAIN_DIR -c dataset/configs/$CONFIG -d ${DATASET_DIR}/${DATASET}/${DOMAIN_FILE} - echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - - export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); - poetry run ddtrace-run rasa test core -s ${DATASET_DIR}/${DATASET}/${TEST_DIR} --out ${{ github.workspace }}/results/${{ matrix.dataset }}/${CONFIG} - - echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT - fi - - - name: Generate a JSON file with a report / Publish results to Datadog - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - env: - SUMMARY_FILE: "./report.json" - DATASET_NAME: ${{ matrix.dataset }} - RESULT_DIR: "${{ github.workspace }}/results" - CONFIG: ${{ matrix.config }} - TEST_RUN_TIME: ${{ steps.run_test.outputs.test_run_time }} - TRAIN_RUN_TIME: ${{ steps.run_test.outputs.train_run_time }} - TOTAL_RUN_TIME: ${{ steps.run_test.outputs.total_run_time }} - DATASET_REPOSITORY_BRANCH: ${{ needs.read_test_configuration.outputs.dataset_branch }} - TYPE: ${{ matrix.type }} - INDEX_REPETITION: ${{ matrix.index_repetition }} - DATASET_COMMIT: ${{ steps.set-dataset-commit.outputs.dataset_commit }} - BRANCH: ${{ github.head_ref }} - GITHUB_SHA: "${{ github.sha }}" - PR_ID: "${{ github.event.number }}" - DD_APP_KEY: ${{ secrets.DD_APP_KEY_PERF_TEST }} - DD_API_KEY: ${{ secrets.DD_API_KEY }} - DD_SITE: datadoghq.eu - run: |- - export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" - poetry run pip install analytics-python - poetry run python .github/scripts/mr_publish_results.py - cat $SUMMARY_FILE - - - name: Upload an artifact with the report - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce - if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' - with: - name: report-${{ matrix.dataset }}-${{ matrix.config }}-${{ matrix.index_repetition }} - path: report.json - - - name: Stop Datadog Agent - if: ${{ always() && 
steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' }} - run: | - sudo service datadog-agent stop - - combine_reports: - name: Combine reports - runs-on: ubuntu-22.04 - needs: - - model_regression_test_cpu - - model_regression_test_gpu - if: ${{ always() && ((needs.model_regression_test_cpu.result != 'skipped') != (needs.model_regression_test_gpu.result != 'skipped')) }} - outputs: - success_status: ${{ steps.set-success-status.outputs.success_status }} - - steps: - - name: Set success status - id: set-success-status - run: |- - succeeded=${{ needs.model_regression_test_cpu.result == 'success' || needs.model_regression_test_gpu.result == 'success' }} - if [[ $succeeded == "false" ]]; then - success_status="Failed" - elif [[ $succeeded == "true" ]]; then - success_status="Succeeded" - else - success_status="Unknown" - fi - echo $success_status - echo "success_status=$success_status" >> $GITHUB_OUTPUT - - - name: Checkout git repository πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Set up Python 3.10 🐍 - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: '3.10' - - - name: Get reports - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - with: - path: reports/ - - - name: Display structure of downloaded files - continue-on-error: true - run: ls -R - working-directory: reports/ - - - name: Merge all reports - env: - SUMMARY_FILE: "./report.json" - REPORTS_DIR: "reports/" - run: | - python .github/scripts/mr_generate_summary.py - cat $SUMMARY_FILE - - - name: Upload an artifact with the overall report - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce - with: - name: report.json - path: ./report.json - - set_job_success_status: - name: Set job success status - runs-on: ubuntu-22.04 - needs: - - combine_reports - if: ${{ always() && needs.combine_reports.result == 'success' }} - steps: - - name: Set return code - run: | - success_status=${{ needs.combine_reports.outputs.success_status }} - echo "Status: $success_status" - if [[ $success_status == "Succeeded" ]]; then - exit 0 - else - exit 1 - fi - - add_comment_results: - name: Add a comment with the results - runs-on: ubuntu-22.04 - needs: - - combine_reports - if: ${{ always() && needs.combine_reports.result == 'success' }} - - steps: - - name: Checkout - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Download report from last on-schedule regression test - run: | - # Get ID of last on-schedule workflow - SCHEDULE_ID=$(curl -X GET -s -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -H "Accept: application/vnd.github.v3+json" \ - "https://api.github.com/repos/${{ github.repository }}/actions/workflows" \ - | jq -r '.workflows[] | select(.name == "CI - Model Regression on schedule") | select(.path | test("schedule")) | .id') - - ARTIFACT_URL=$(curl -s -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -H "Accept: application/vnd.github.v3+json" \ - "https://api.github.com/repos/${{ github.repository }}/actions/workflows/${SCHEDULE_ID}/runs?event=schedule&status=completed&branch=main&per_page=1" | jq -r .workflow_runs[0].artifacts_url) - - DOWNLOAD_URL=$(curl -s -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -H "Accept: application/vnd.github.v3+json" "${ARTIFACT_URL}" \ - | jq -r '.artifacts[] | select(.name == "report.json") | .archive_download_url') - - # Download the artifact - curl -H 
'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -LJO -H "Accept: application/vnd.github.v3+json" $DOWNLOAD_URL - - # Unzip and change name - unzip report.json.zip && mv report.json report_main.json - - - name: Download the report - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - with: - name: report.json - - - name: Download gomplate - run: |- - sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 - sudo chmod +x /usr/local/bin/gomplate - - - name: Render a comment to add - id: get_results - run: | - OUTPUT="$(gomplate -d data=report.json -d results_main=report_main.json -f .github/templates/model_regression_test_results.tmpl)" - OUTPUT="${OUTPUT//$'\n'/'%0A'}" - OUTPUT="${OUTPUT//$'\r'/'%0D'}" - echo "result=$OUTPUT" >> $GITHUB_OUTPUT - - # Get time of current commit as start time - TIME_ISO_COMMIT=$(gomplate -d github=https://api.github.com/repos/rasaHQ/rasa/commits/${{ github.sha }} -H 'github=Authorization:token ${{ secrets.GITHUB_TOKEN }}' -i '{{ (ds "github").commit.author.date }}') # Example "2022-02-17T14:06:38Z" - TIME_UNIX_COMMIT=$(date -d "${TIME_ISO_COMMIT}" +%s%3N) # Example: "1645106798" - - # Get current time - TIME_ISO_NOW=$(gomplate -i '{{ (time.Now).UTC.Format time.RFC3339}}') # Example: "2022-02-17T14:50:54Z%" - TIME_UNIX_NOW=$(date -d "${TIME_ISO_NOW}" +%s%3N) # Example: "1645118083" - - echo "from_ts=$TIME_UNIX_COMMIT" >> $GITHUB_OUTPUT - echo "to_ts=$TIME_UNIX_NOW" >> $GITHUB_OUTPUT - - - name: Publish results as a PR comment - uses: marocchino/sticky-pull-request-comment@f61b6cf21ef2fcc468f4345cdfcc9bda741d2343 # v2.6.2 - if: ${{ always() }} - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - header: ${{ github.run_id }} - append: true - message: |- - Status of the run: ${{ needs.combine_reports.outputs.success_status }} - - Commit: ${{ github.sha }}, [The full report is available as an artifact.](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) - - [Datadog dashboard link](https://app.datadoghq.eu/dashboard/mf4-2hu-x84?tpl_var_branch_baseline=${{ github.head_ref }}&from_ts=${{ steps.get_results.outputs.from_ts }}&to_ts=${{ steps.get_results.outputs.to_ts }}&live=false) - - ${{ steps.get_results.outputs.result }} - - - name: Remove 'status:model-regression-tests' label - continue-on-error: true - uses: buildsville/add-remove-label@6008d7bd99d3baeb7c04033584e68f8ec80b198b # v1.0 - with: - token: ${{secrets.GITHUB_TOKEN}} - label: "status:model-regression-tests" - type: remove - - - name: Remove 'runner:gpu' label - continue-on-error: true - uses: buildsville/add-remove-label@6008d7bd99d3baeb7c04033584e68f8ec80b198b # v1.0 - with: - token: ${{secrets.GITHUB_TOKEN}} - label: "runner:gpu" - type: remove - - remove_runner_gpu: - name: Delete Github Runner - GPU - needs: - - deploy_runner_gpu - - model_regression_test_gpu - runs-on: ubuntu-22.04 - if: ${{ always() && needs.deploy_runner_gpu.result != 'skipped' && contains(github.event.pull_request.labels.*.name, 'runner:gpu') && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') }} - - steps: - # Setup gcloud auth - - uses: google-github-actions/auth@e8df18b60c5dd38ba618c121b779307266153fbf - with: - service_account: ${{ secrets.GKE_RASA_CI_GPU_SA_NAME_RASA_CI_CD }} - credentials_json: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} - - # Get the GKE credentials for the cluster - - name: Get GKE Cluster Credentials - uses: 
google-github-actions/get-gke-credentials@894c221960ab1bc16a69902f29f090638cca753f - with: - cluster_name: ${{ secrets.GKE_GPU_CLUSTER_RASA_CI_CD }} - location: ${{ env.GKE_ZONE }} - project_id: ${{ secrets.GKE_SA_RASA_CI_GPU_PROJECT_RASA_CI_CD }} - - - name: Remove Github Runner - run: kubectl -n github-runner delete deployments github-runner-${GITHUB_RUN_ID} --grace-period=30 +## The docs: +## - https://www.notion.so/rasa/The-CI-for-model-regression-tests-aa579d5524a544af992f97d132bcc2de +## - https://www.notion.so/rasa/Datadog-Usage-Documentation-422099c9a3a24f5a99d92d904537dd0b +#name: CI - Model Regression +# +#on: +# push: +# branches: +# - "[0-9]+.[0-9]+.x" +# tags: +# - "**" +# pull_request: +# types: [opened, synchronize, labeled] +# +#concurrency: +# group: ci-model-regression-${{ github.ref }} # branch or tag name +# cancel-in-progress: true +# +#env: +# GKE_ZONE: us-central1 +# GCLOUD_VERSION: "318.0.0" +# DD_PROFILING_ENABLED: false +# TF_FORCE_GPU_ALLOW_GROWTH: true +# NVML_INTERVAL_IN_SEC: 1 +# +#jobs: +# read_test_configuration: +# name: Reads tests configuration +# if: ${{ github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') }} +# runs-on: ubuntu-22.04 +# outputs: +# matrix: ${{ steps.set-matrix.outputs.matrix }} +# matrix_length: ${{ steps.set-matrix.outputs.matrix_length }} +# configuration_id: ${{ steps.fc_config.outputs.comment-id }} +# dataset_branch: ${{ steps.set-dataset-branch.outputs.dataset_branch }} +# +# steps: +# - name: Checkout main +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Download gomplate +# run: |- +# sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 +# sudo chmod +x /usr/local/bin/gomplate +# +# - name: Find a comment with configuration +# uses: tczekajlo/find-comment@16228d0f2100e06ea9bf8c0e7fe7287b7c6b531d +# id: fc_config +# with: +# token: ${{ secrets.GITHUB_TOKEN }} +# issue-number: ${{ github.event.number }} +# body-includes: "^/modeltest" +# +# - run: echo ${{ steps.fc_config.outputs.comment-id }} +# +# # This step has to happen before the other configuration details are read from +# # the same PR comment, because we need to check out the correct branch to feed the +# # dataset mapping and configs into the 'Read configuration from a PR comment' step +# # which creates the experiments matrix +# - name: Read dataset branch from a PR comment +# if: steps.fc_config.outputs.comment-id != '' +# id: set-dataset-branch +# run: |- +# source <(gomplate -d github=https://api.github.com/repos/${{ github.repository }}/issues/comments/${{ steps.fc_config.outputs.comment-id }} -H 'github=Authorization:token ${{ secrets.GITHUB_TOKEN }}' -f .github/templates/model_regression_test_read_dataset_branch.tmpl) +# echo "dataset_branch=${DATASET_BRANCH}" >> $GITHUB_OUTPUT +# +# - name: Checkout dataset +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# with: +# repository: ${{ secrets.DATASET_REPOSITORY }} +# token: ${{ secrets.ML_TEST_SA_PAT }} +# path: "dataset" +# ref: ${{ steps.set-dataset-branch.outputs.dataset_branch }} +# +# - name: Render help description from template +# id: get_help_description +# run: | +# OUTPUT=$(gomplate -d mapping=./dataset/dataset_config_mapping.json -f .github/templates/model_regression_test_config_comment.tmpl) +# OUTPUT="${OUTPUT//$'\n'/'%0A'}" +# OUTPUT="${OUTPUT//$'\r'/'%0D'}" +# echo "help_description=$OUTPUT" >> $GITHUB_OUTPUT 
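+#             # NOTE: the %0A/%0D escaping above dates from the old ::set-output
+#             # command; with $GITHUB_OUTPUT a multi-line value is normally
+#             # passed with a heredoc delimiter instead, e.g. (sketch):
+#             #   { echo "help_description<<EOF"; echo "$OUTPUT"; echo "EOF"; } >> $GITHUB_OUTPUT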
+# +# - name: Create a comment with help description +# uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486 +# with: +# mode: "delete-previous" +# id: comment_help_description +# github-token: ${{ secrets.GITHUB_TOKEN }} +# body: | +# ${{ steps.get_help_description.outputs.help_description }} +# +# - if: steps.fc_config.outputs.comment-id == '' +# run: echo "::error::Cannot find a comment with the configuration" +# name: Log a warning message if a configuration cannot be found +# +# - name: Read configuration from a PR comment +# if: steps.fc_config.outputs.comment-id != '' +# id: set-matrix +# run: |- +# matrix=$(gomplate -d mapping=./dataset/dataset_config_mapping.json -d github=https://api.github.com/repos/${{ github.repository }}/issues/comments/${{ steps.fc_config.outputs.comment-id }} -H 'github=Authorization:token ${{ secrets.GITHUB_TOKEN }}' -f .github/templates/model_regression_test_config_to_json.tmpl) +# +# if [ $? -ne 0 ]; then +# echo "::error::Cannot read config from PR. Please double check your config." +# exit 1 +# fi +# +# matrix_length=$(echo $matrix | jq '.[] | length') +# echo "matrix_length=$matrix_length" >> $GITHUB_OUTPUT +# echo "matrix=$matrix" >> $GITHUB_OUTPUT +# +# - name: Update the comment with the configuration +# uses: peter-evans/create-or-update-comment@3383acd359705b10cb1eeef05c0e88c056ea4666 +# if: steps.fc_config.outputs.comment-id != '' +# with: +# comment-id: ${{ steps.fc_config.outputs.comment-id }} +# body: | +# +# reactions: eyes +# +# - name: Re-create the comment with the configuration +# uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486 +# if: steps.fc_config.outputs.comment-id != '' && steps.fc_config.outputs.comment-body != '' +# with: +# mode: "delete-previous" +# id: comment_configuration +# github-token: ${{ secrets.GITHUB_TOKEN }} +# body: ${{ steps.fc_config.outputs.comment-body }} +# +# - name: Find a comment with configuration - update +# uses: tczekajlo/find-comment@16228d0f2100e06ea9bf8c0e7fe7287b7c6b531d +# id: fc_config_update +# with: +# token: ${{ secrets.GITHUB_TOKEN }} +# issue-number: ${{ github.event.number }} +# body-includes: "^/modeltest" +# +# - name: Add reaction +# uses: peter-evans/create-or-update-comment@3383acd359705b10cb1eeef05c0e88c056ea4666 +# if: steps.fc_config_update.outputs.comment-id != '' +# with: +# edit-mode: "replace" +# comment-id: ${{ steps.fc_config_update.outputs.comment-id }} +# reactions: heart, hooray, rocket +# +# - name: Add a comment that the tests are in progress +# uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486 +# if: steps.fc_config_update.outputs.comment-id != '' +# with: +# mode: "delete-previous" +# id: comment_tests_in_progress +# github-token: ${{ secrets.GITHUB_TOKEN }} +# body: | +# The model regression tests have started. It might take a while, please be patient. +# As soon as results are ready you'll see a new comment with the results. 
+# +# Used configuration can be found in [the comment.](https://github.com/${{ github.repository }}/pull/${{ github.event.number}}#issuecomment-${{ steps.fc_config_update.outputs.comment-id }}) +# +# deploy_runner_gpu: +# name: Deploy Github Runner - GPU +# needs: read_test_configuration +# runs-on: ubuntu-22.04 +# if: ${{ contains(github.event.pull_request.labels.*.name, 'runner:gpu') && github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') && needs.read_test_configuration.outputs.configuration_id != '' }} +# +# steps: +# - name: Checkout +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Download gomplate +# run: |- +# sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 +# sudo chmod +x /usr/local/bin/gomplate +# +# - name: Get TensorFlow version +# run: |- +# # Read TF version from poetry.lock file +# pip install toml +# TF_VERSION=$(scripts/read_tensorflow_version.sh) +# # Keep the first 3 characters, e.g. we keep 2.3 if TF_VERSION is 2.3.4 +# TF_VERSION=${TF_VERSION::3} +# echo "TensorFlow version: $TF_VERSION" +# echo TF_VERSION=$TF_VERSION >> $GITHUB_ENV +# +# # Use compatible CUDA/cuDNN with the given TF version +# - name: Prepare GitHub runner image tag +# run: |- +# GH_RUNNER_IMAGE_TAG=$(jq -r 'if (.config | any(.TF == "${{ env.TF_VERSION }}" )) then (.config[] | select(.TF == "${{ env.TF_VERSION }}") | .IMAGE_TAG) else .default_image_tag end' .github/configs/tf-cuda.json) +# echo "GitHub runner image tag for TensorFlow ${{ env.TF_VERSION }} is ${GH_RUNNER_IMAGE_TAG}" +# echo GH_RUNNER_IMAGE_TAG=$GH_RUNNER_IMAGE_TAG >> $GITHUB_ENV +# +# num_max_replicas=3 +# matrix_length=${{ needs.read_test_configuration.outputs.matrix_length }} +# if [[ $matrix_length -gt $num_max_replicas ]]; then +# NUM_REPLICAS=$num_max_replicas +# else +# NUM_REPLICAS=$matrix_length +# fi +# echo NUM_REPLICAS=$NUM_REPLICAS >> $GITHUB_ENV +# +# - name: Send warning if the current TF version does not have CUDA image tags configured +# if: env.GH_RUNNER_IMAGE_TAG == 'latest' +# env: +# TF_CUDA_FILE: ./github/config/tf-cuda.json +# run: |- +# echo "::warning file=${TF_CUDA_FILE},line=3,col=1,endColumn=3::Missing cuda config for tf ${{ env.TF_VERSION }}. If you are not sure how to config CUDA, please reach out to infrastructure." 
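+#             # tf-cuda.json, as queried by the jq step above, maps TF minor
+#             # versions to runner image tags, roughly (values hypothetical):
+#             #   {"default_image_tag": "latest",
+#             #    "config": [{"TF": "2.6", "IMAGE_TAG": "tf2.6-cuda11.2"}]}
+#             # A result of 'latest' therefore means no entry matched. Note that
+#             # the TF_CUDA_FILE path above (./github/config/...) differs from
+#             # the .github/configs/tf-cuda.json path the jq step reads.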
+# +# - name: Notify slack on tf-cuda config updates +# if: env.GH_RUNNER_IMAGE_TAG == 'latest' +# env: +# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} +# uses: voxmedia/github-action-slack-notify-build@3665186a8c1a022b28a1dbe0954e73aa9081ea9e +# with: +# channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }} +# status: WARNING +# color: warning +# +# - name: Render deployment template +# run: |- +# export GH_RUNNER_IMAGE_TAG=${{ env.GH_RUNNER_IMAGE_TAG }} +# export GH_RUNNER_IMAGE=${{ secrets.GH_RUNNER_IMAGE }} +# gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml +# +# # Setup gcloud auth +# - uses: google-github-actions/auth@e8df18b60c5dd38ba618c121b779307266153fbf +# with: +# service_account: ${{ secrets.GKE_RASA_CI_GPU_SA_NAME_RASA_CI_CD }} +# credentials_json: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} +# +# # Get the GKE credentials for the cluster +# - name: Get GKE Cluster Credentials +# uses: google-github-actions/get-gke-credentials@894c221960ab1bc16a69902f29f090638cca753f +# with: +# cluster_name: ${{ secrets.GKE_GPU_CLUSTER_RASA_CI_CD }} +# location: ${{ env.GKE_ZONE }} +# project_id: ${{ secrets.GKE_SA_RASA_CI_GPU_PROJECT_RASA_CI_CD }} +# +# - name: Deploy Github Runner +# run: |- +# kubectl apply -f runner_deployment.yaml +# kubectl -n github-runner rollout status --timeout=15m deployment/github-runner-$GITHUB_RUN_ID +# +# model_regression_test_gpu: +# name: Model Regression Tests - GPU +# needs: +# - deploy_runner_gpu +# - read_test_configuration +# env: +# # Determine where CUDA and Nvidia libraries are located. TensorFlow looks for libraries in the given paths +# LD_LIBRARY_PATH: "/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/lib64:/usr/local/nvidia/lib:/usr/local/nvidia/lib64" +# ACCELERATOR_TYPE: "GPU" +# runs-on: [self-hosted, gpu, "${{ github.run_id }}"] +# strategy: +# # max-parallel: By default, GitHub will maximize the number of jobs run in parallel depending on the available runners on GitHub-hosted virtual machines. +# matrix: ${{fromJson(needs.read_test_configuration.outputs.matrix)}} +# fail-fast: false +# if: ${{ contains(github.event.pull_request.labels.*.name, 'runner:gpu') && github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') && needs.read_test_configuration.outputs.configuration_id != '' }} +# +# steps: +# - name: Checkout +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Checkout dataset +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# with: +# repository: ${{ secrets.DATASET_REPOSITORY }} +# token: ${{ secrets.ML_TEST_SA_PAT }} +# path: "dataset" +# ref: ${{ needs.read_test_configuration.outputs.dataset_branch }} +# +# - name: Set env variables +# id: set_dataset_config_vars +# env: +# DATASET_NAME: "${{ matrix.dataset }}" +# CONFIG_NAME: "${{ matrix.config }}" +# run: |- +# # determine extra environment variables +# # - CONFIG +# # - DATASET +# # - IS_EXTERNAL +# # - EXTERNAL_DATASET_REPOSITORY_BRANCH +# # - TRAIN_DIR +# # - TEST_DIR +# # - DOMAIN_FILE +# source <(gomplate -d mapping=./dataset/dataset_config_mapping.json -f .github/templates/configuration_variables.tmpl) +# +# # Not all configurations are available for all datasets. 
+# # The job will fail and the workflow continues, if the configuration file doesn't exist +# # for a given dataset +# +# echo "is_dataset_exists=true" >> $GITHUB_OUTPUT +# echo "is_config_exists=true" >> $GITHUB_OUTPUT +# echo "is_external=${IS_EXTERNAL}" >> $GITHUB_OUTPUT +# +# # Warn about job if dataset is Hermit and config is BERT + DIET(seq) + ResponseSelector(t2t) or Sparse + BERT + DIET(seq) + ResponseSelector(t2t) +# if [[ "${{ matrix.dataset }}" == "Hermit" && "${{ matrix.config }}" =~ "BERT + DIET(seq) + ResponseSelector(t2t)" ]]; then +# echo "::warning::This ${{ matrix.dataset }} dataset / ${{ matrix.config }} config is currently being skipped on scheduled tests due to OOM associated with the upgrade to TF 2.6. You may see OOM here." +# fi +# +# if [[ "${IS_EXTERNAL}" == "true" ]]; then +# echo "DATASET_DIR=dataset_external" >> $GITHUB_ENV +# else +# echo "DATASET_DIR=dataset" >> $GITHUB_ENV +# test -d dataset/$DATASET || (echo "::warning::The ${{ matrix.dataset }} dataset doesn't exist. Skipping the job." \ +# && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0) +# fi +# +# # Skip job if a given type is not available for a given dataset +# if [[ -z "${DOMAIN_FILE}" && "${{ matrix.type }}" == "core" ]]; then +# echo "::warning::The ${{ matrix.dataset }} dataset doesn't include core type. Skipping the job." \ +# && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0 +# fi +# +# test -f dataset/configs/$CONFIG || (echo "::warning::The ${{ matrix.config }} configuration file doesn't exist. Skipping the job." \ +# && echo "is_dataset_exists=false" >> $GITHUB_OUTPUT && exit 0) +# +# echo "DATASET=${DATASET}" >> $GITHUB_ENV +# echo "CONFIG=${CONFIG}" >> $GITHUB_ENV +# echo "DOMAIN_FILE=${DOMAIN_FILE}" >> $GITHUB_ENV +# echo "EXTERNAL_DATASET_REPOSITORY_BRANCH=${EXTERNAL_DATASET_REPOSITORY_BRANCH}" >> $GITHUB_ENV +# echo "IS_EXTERNAL=${IS_EXTERNAL}" >> $GITHUB_ENV +# +# if [[ -z "${TRAIN_DIR}" ]]; then +# echo "TRAIN_DIR=train" >> $GITHUB_ENV +# else +# echo "TRAIN_DIR=${TRAIN_DIR}" >> $GITHUB_ENV +# fi +# +# if [[ -z "${TEST_DIR}" ]]; then +# echo "TEST_DIR=test" >> $GITHUB_ENV +# else +# echo "TEST_DIR=${TEST_DIR}" >> $GITHUB_ENV +# fi +# +# HOST_NAME=`hostname` +# echo "HOST_NAME=${HOST_NAME}" >> $GITHUB_ENV +# +# - name: Checkout dataset - external +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# if: steps.set_dataset_config_vars.outputs.is_external == 'true' +# with: +# repository: ${{ env.DATASET }} +# token: ${{ secrets.ML_TEST_SA_PAT }} +# path: "dataset_external" +# ref: ${{ env.EXTERNAL_DATASET_REPOSITORY_BRANCH }} +# +# - name: Set dataset commit +# id: set-dataset-commit +# working-directory: ${{ env.DATASET_DIR }} +# run: | +# DATASET_COMMIT=$(git rev-parse HEAD) +# echo $DATASET_COMMIT +# echo "dataset_commit=$DATASET_COMMIT" >> $GITHUB_OUTPUT +# +# - name: Start Datadog Agent +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# env: +# DATASET_NAME: "${{ matrix.dataset }}" +# CONFIG: "${{ matrix.config }}" +# DATASET_COMMIT: "${{ steps.set-dataset-commit.outputs.dataset_commit }}" +# BRANCH: ${{ github.head_ref }} +# GITHUB_SHA: "${{ github.sha }}" +# PR_ID: "${{ github.event.number }}" +# TYPE: "${{ matrix.type }}" +# DATASET_REPOSITORY_BRANCH: ${{ needs.read_test_configuration.outputs.dataset_branch }} +# INDEX_REPETITION: "${{ matrix.index_repetition }}" +# run: | +# export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ 
github.event.number }}" +# .github/scripts/start_dd_agent.sh "${{ secrets.DD_API_KEY }}" "${{ env.ACCELERATOR_TYPE }}" ${{ env.NVML_INTERVAL_IN_SEC }} +# +# - name: Set up Python 3.10 🐍 +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# python-version: '3.10' +# +# - name: Read Poetry Version πŸ”’ +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Load Poetry Cached Libraries ⬇ +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# path: ~/.cache/pypoetry/virtualenvs +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} +# +# - name: Install Dependencies πŸ“¦ +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: | +# make install-full +# poetry run python -m spacy download de_core_news_md +# +# - name: Install datadog dependencies +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: poetry run pip install -U datadog-api-client ddtrace +# +# - name: Validate that GPUs are working +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: |- +# poetry run python .github/scripts/validate_gpus.py +# +# - name: Download pretrained models πŸ’ͺ +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: |- +# poetry run python .github/scripts/download_pretrained.py --config dataset/configs/${CONFIG} +# +# - name: Run test +# id: run_test +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# env: +# TFHUB_CACHE_DIR: ~/.tfhub_cache/ +# OMP_NUM_THREADS: 1 +# run: |- +# poetry run rasa --version +# +# export NOW_TRAIN=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); +# cd ${{ github.workspace }} +# +# if [[ "${{ steps.set_dataset_config_vars.outputs.is_external }}" == "true" ]]; then +# export DATASET=. 
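+#            # For illustration only: the gomplate one-liners in this step implement a
+#            # shell stopwatch. NOW_TRAIN above holds an RFC3339 timestamp, and
+#            #   gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}'
+#            # prints an elapsed duration such as 5m12s, which is what lands in the
+#            # train_run_time/test_run_time/total_run_time outputs below.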
+# fi +# +# if [[ "${{ matrix.type }}" == "nlu" ]]; then +# poetry run ddtrace-run rasa train nlu --quiet -u ${DATASET_DIR}/${DATASET}/${TRAIN_DIR} -c dataset/configs/${CONFIG} --out ${DATASET_DIR}/models/${DATASET}/${CONFIG} +# echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# +# export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); +# poetry run ddtrace-run rasa test nlu --quiet -u ${DATASET_DIR}/$DATASET/${TEST_DIR} -m ${DATASET_DIR}/models/$DATASET/$CONFIG --out ${{ github.workspace }}/results/$DATASET/$CONFIG +# +# echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# +# elif [[ "${{ matrix.type }}" == "core" ]]; then +# poetry run ddtrace-run rasa train core --quiet -s ${DATASET_DIR}/$DATASET/$TRAIN_DIR -c dataset/configs/$CONFIG -d ${DATASET_DIR}/${DATASET}/${DOMAIN_FILE} +# echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# +# export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); +# poetry run ddtrace-run rasa test core -s ${DATASET_DIR}/${DATASET}/${TEST_DIR} --out ${{ github.workspace }}/results/${{ matrix.dataset }}/${CONFIG} +# +# echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# fi +# +# - name: Generate a JSON file with a report / Publish results to Datadog +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# env: +# SUMMARY_FILE: "./report.json" +# DATASET_NAME: ${{ matrix.dataset }} +# RESULT_DIR: "${{ github.workspace }}/results" +# CONFIG: ${{ matrix.config }} +# TEST_RUN_TIME: ${{ steps.run_test.outputs.test_run_time }} +# TRAIN_RUN_TIME: ${{ steps.run_test.outputs.train_run_time }} +# TOTAL_RUN_TIME: ${{ steps.run_test.outputs.total_run_time }} +# DATASET_REPOSITORY_BRANCH: ${{ needs.read_test_configuration.outputs.dataset_branch }} +# TYPE: ${{ matrix.type }} +# INDEX_REPETITION: ${{ matrix.index_repetition }} +# DATASET_COMMIT: ${{ steps.set-dataset-commit.outputs.dataset_commit }} +# BRANCH: ${{ github.head_ref }} +# GITHUB_SHA: "${{ github.sha }}" +# PR_ID: "${{ github.event.number }}" +# DD_APP_KEY: ${{ secrets.DD_APP_KEY_PERF_TEST }} +# DD_API_KEY: ${{ secrets.DD_API_KEY }} +# DD_SITE: datadoghq.eu +# run: |- +# export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" +# poetry run pip install analytics-python +# poetry run python .github/scripts/mr_publish_results.py +# cat $SUMMARY_FILE +# +# - name: Upload an artifact with the report +# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# name: report-${{ matrix.dataset }}-${{ matrix.config }}-${{ matrix.index_repetition }} +# path: report.json +# +# - 
name: Stop Datadog Agent +# if: ${{ always() && steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' }} +# run: | +# sudo service datadog-agent stop +# +# model_regression_test_cpu: +# name: Model Regression Tests - CPU +# needs: +# - read_test_configuration +# env: +# ACCELERATOR_TYPE: "CPU" +# runs-on: ubuntu-22.04 +# strategy: +# max-parallel: 3 +# matrix: ${{fromJson(needs.read_test_configuration.outputs.matrix)}} +# fail-fast: false +# if: ${{ !contains(github.event.pull_request.labels.*.name, 'runner:gpu') && github.repository == 'RasaHQ/rasa' && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') && needs.read_test_configuration.outputs.configuration_id != '' }} +# +# steps: +# - name: Checkout +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Checkout dataset +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# with: +# repository: ${{ secrets.DATASET_REPOSITORY }} +# token: ${{ secrets.ML_TEST_SA_PAT }} +# path: "dataset" +# ref: ${{ needs.read_test_configuration.outputs.dataset_branch }} +# +# - name: Download gomplate +# run: |- +# sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 +# sudo chmod +x /usr/local/bin/gomplate +# +# - name: Set env variables +# id: set_dataset_config_vars +# env: +# DATASET_NAME: "${{ matrix.dataset }}" +# CONFIG_NAME: "${{ matrix.config }}" +# run: |- +# # determine extra environment variables +# # - CONFIG +# # - DATASET +# # - IS_EXTERNAL +# # - EXTERNAL_DATASET_REPOSITORY_BRANCH +# # - TRAIN_DIR +# # - TEST_DIR +# # - DOMAIN_FILE +# source <(gomplate -d mapping=./dataset/dataset_config_mapping.json -f .github/templates/configuration_variables.tmpl) +# +# # Not all configurations are available for all datasets. +# # The job will fail and the workflow continues, if the configuration file doesn't exist +# # for a given dataset +# +# echo "is_dataset_exists=true" >> $GITHUB_OUTPUT +# echo "is_config_exists=true" >> $GITHUB_OUTPUT +# echo "is_external=${IS_EXTERNAL}" >> $GITHUB_OUTPUT +# +# if [[ "${IS_EXTERNAL}" == "true" ]]; then +# echo "DATASET_DIR=dataset_external" >> $GITHUB_ENV +# else +# echo "DATASET_DIR=dataset" >> $GITHUB_ENV +# test -d dataset/$DATASET || (echo "::warning::The ${{ matrix.dataset }} dataset doesn't exist. Skipping the job." \ +# && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0) +# fi +# +# # Skip job if a given type is not available for a given dataset +# if [[ -z "${DOMAIN_FILE}" && "${{ matrix.type }}" == "core" ]]; then +# echo "::warning::The ${{ matrix.dataset }} dataset doesn't include core type. Skipping the job." \ +# && echo "is_config_exists=false" >> $GITHUB_OUTPUT && exit 0 +# fi +# +# test -f dataset/configs/$CONFIG || (echo "::warning::The ${{ matrix.config }} configuration file doesn't exist. Skipping the job." 
\ +# && echo "is_dataset_exists=false" >> $GITHUB_OUTPUT && exit 0) +# +# echo "DATASET=${DATASET}" >> $GITHUB_ENV +# echo "CONFIG=${CONFIG}" >> $GITHUB_ENV +# echo "DOMAIN_FILE=${DOMAIN_FILE}" >> $GITHUB_ENV +# echo "EXTERNAL_DATASET_REPOSITORY_BRANCH=${EXTERNAL_DATASET_REPOSITORY_BRANCH}" >> $GITHUB_ENV +# echo "IS_EXTERNAL=${IS_EXTERNAL}" >> $GITHUB_ENV +# +# if [[ -z "${TRAIN_DIR}" ]]; then +# echo "TRAIN_DIR=train" >> $GITHUB_ENV +# else +# echo "TRAIN_DIR=${TRAIN_DIR}" >> $GITHUB_ENV +# fi +# +# if [[ -z "${TEST_DIR}" ]]; then +# echo "TEST_DIR=test" >> $GITHUB_ENV +# else +# echo "TEST_DIR=${TEST_DIR}" >> $GITHUB_ENV +# fi +# +# HOST_NAME=`hostname` +# echo "HOST_NAME=${HOST_NAME}" >> $GITHUB_ENV +# +# - name: Checkout dataset - external +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# if: steps.set_dataset_config_vars.outputs.is_external == 'true' +# with: +# repository: ${{ env.DATASET }} +# token: ${{ secrets.ML_TEST_SA_PAT }} +# path: "dataset_external" +# ref: ${{ env.EXTERNAL_DATASET_REPOSITORY_BRANCH }} +# +# - name: Set dataset commit +# id: set-dataset-commit +# working-directory: ${{ env.DATASET_DIR }} +# run: | +# DATASET_COMMIT=$(git rev-parse HEAD) +# echo $DATASET_COMMIT +# echo "dataset_commit=$DATASET_COMMIT" >> $GITHUB_OUTPUT +# +# - name: Start Datadog Agent +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# env: +# DATASET_NAME: "${{ matrix.dataset }}" +# CONFIG: "${{ matrix.config }}" +# DATASET_COMMIT: "${{ steps.set-dataset-commit.outputs.dataset_commit }}" +# BRANCH: ${{ github.head_ref }} +# GITHUB_SHA: "${{ github.sha }}" +# PR_ID: "${{ github.event.number }}" +# TYPE: "${{ matrix.type }}" +# DATASET_REPOSITORY_BRANCH: ${{ matrix.dataset_branch }} +# INDEX_REPETITION: "${{ matrix.index_repetition }}" +# run: | +# export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" +# .github/scripts/start_dd_agent.sh "${{ secrets.DD_API_KEY }}" "${{ env.ACCELERATOR_TYPE }}" ${{ env.NVML_INTERVAL_IN_SEC }} +# +# - name: Set up Python 3.10 🐍 +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# python-version: '3.10' +# +# - name: Read Poetry Version πŸ”’ +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Load Poetry Cached Libraries ⬇ +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# path: ~/.cache/pypoetry/virtualenvs +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} +# +# - name: Install Dependencies πŸ“¦ +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && 
steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: | +# make install-full +# poetry run python -m spacy download de_core_news_md +# +# - name: Install datadog dependencies +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: poetry run pip install -U datadog-api-client ddtrace +# +# - name: CPU run - Validate that no GPUs are available +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: |- +# poetry run python .github/scripts/validate_cpu.py +# +# - name: Download pretrained models πŸ’ͺ +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# run: |- +# poetry run python .github/scripts/download_pretrained.py --config dataset/configs/${CONFIG} +# +# - name: Run test +# id: run_test +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# env: +# TFHUB_CACHE_DIR: ~/.tfhub_cache/ +# OMP_NUM_THREADS: 1 +# run: |- +# poetry run rasa --version +# +# export NOW_TRAIN=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); +# cd ${{ github.workspace }} +# +# if [[ "${{ steps.set_dataset_config_vars.outputs.is_external }}" == "true" ]]; then +# export DATASET=. +# fi +# +# if [[ "${{ matrix.type }}" == "nlu" ]]; then +# poetry run ddtrace-run rasa train nlu --quiet -u ${DATASET_DIR}/${DATASET}/${TRAIN_DIR} -c dataset/configs/${CONFIG} --out ${DATASET_DIR}/models/${DATASET}/${CONFIG} +# echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# +# export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); +# poetry run ddtrace-run rasa test nlu --quiet -u ${DATASET_DIR}/$DATASET/${TEST_DIR} -m ${DATASET_DIR}/models/$DATASET/$CONFIG --out ${{ github.workspace }}/results/$DATASET/$CONFIG +# +# echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# +# elif [[ "${{ matrix.type }}" == "core" ]]; then +# poetry run ddtrace-run rasa train core --quiet -s ${DATASET_DIR}/$DATASET/$TRAIN_DIR -c dataset/configs/$CONFIG -d ${DATASET_DIR}/${DATASET}/${DOMAIN_FILE} +# echo "train_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# +# export NOW_TEST=$(gomplate -i '{{ (time.Now).Format time.RFC3339}}'); +# poetry run ddtrace-run rasa test core -s ${DATASET_DIR}/${DATASET}/${TEST_DIR} --out ${{ github.workspace }}/results/${{ matrix.dataset }}/${CONFIG} +# +# echo "test_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TEST") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# echo "total_run_time=$(gomplate -i '{{ $t := time.Parse time.RFC3339 (getenv "NOW_TRAIN") }}{{ (time.Since $t).Round (time.Second 1) }}')" >> $GITHUB_OUTPUT +# fi +# +# - name: Generate a JSON file with a report / Publish results to Datadog +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && 
steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# env: +# SUMMARY_FILE: "./report.json" +# DATASET_NAME: ${{ matrix.dataset }} +# RESULT_DIR: "${{ github.workspace }}/results" +# CONFIG: ${{ matrix.config }} +# TEST_RUN_TIME: ${{ steps.run_test.outputs.test_run_time }} +# TRAIN_RUN_TIME: ${{ steps.run_test.outputs.train_run_time }} +# TOTAL_RUN_TIME: ${{ steps.run_test.outputs.total_run_time }} +# DATASET_REPOSITORY_BRANCH: ${{ needs.read_test_configuration.outputs.dataset_branch }} +# TYPE: ${{ matrix.type }} +# INDEX_REPETITION: ${{ matrix.index_repetition }} +# DATASET_COMMIT: ${{ steps.set-dataset-commit.outputs.dataset_commit }} +# BRANCH: ${{ github.head_ref }} +# GITHUB_SHA: "${{ github.sha }}" +# PR_ID: "${{ github.event.number }}" +# DD_APP_KEY: ${{ secrets.DD_APP_KEY_PERF_TEST }} +# DD_API_KEY: ${{ secrets.DD_API_KEY }} +# DD_SITE: datadoghq.eu +# run: |- +# export PR_URL="https://github.com/${GITHUB_REPOSITORY}/pull/${{ github.event.number }}" +# poetry run pip install analytics-python +# poetry run python .github/scripts/mr_publish_results.py +# cat $SUMMARY_FILE +# +# - name: Upload an artifact with the report +# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce +# if: steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' +# with: +# name: report-${{ matrix.dataset }}-${{ matrix.config }}-${{ matrix.index_repetition }} +# path: report.json +# +# - name: Stop Datadog Agent +# if: ${{ always() && steps.set_dataset_config_vars.outputs.is_dataset_exists == 'true' && steps.set_dataset_config_vars.outputs.is_config_exists == 'true' }} +# run: | +# sudo service datadog-agent stop +# +# combine_reports: +# name: Combine reports +# runs-on: ubuntu-22.04 +# needs: +# - model_regression_test_cpu +# - model_regression_test_gpu +# if: ${{ always() && ((needs.model_regression_test_cpu.result != 'skipped') != (needs.model_regression_test_gpu.result != 'skipped')) }} +# outputs: +# success_status: ${{ steps.set-success-status.outputs.success_status }} +# +# steps: +# - name: Set success status +# id: set-success-status +# run: |- +# succeeded=${{ needs.model_regression_test_cpu.result == 'success' || needs.model_regression_test_gpu.result == 'success' }} +# if [[ $succeeded == "false" ]]; then +# success_status="Failed" +# elif [[ $succeeded == "true" ]]; then +# success_status="Succeeded" +# else +# success_status="Unknown" +# fi +# echo $success_status +# echo "success_status=$success_status" >> $GITHUB_OUTPUT +# +# - name: Checkout git repository πŸ• +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Set up Python 3.10 🐍 +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: '3.10' +# +# - name: Get reports +# uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a +# with: +# path: reports/ +# +# - name: Display structure of downloaded files +# continue-on-error: true +# run: ls -R +# working-directory: reports/ +# +# - name: Merge all reports +# env: +# SUMMARY_FILE: "./report.json" +# REPORTS_DIR: "reports/" +# run: | +# python .github/scripts/mr_generate_summary.py +# cat $SUMMARY_FILE +# +# - name: Upload an artifact with the overall report +# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce +# with: +# name: report.json +# path: ./report.json +# +# set_job_success_status: +# name: Set job success status +# runs-on: ubuntu-22.04 +# needs: +# - 
combine_reports +# if: ${{ always() && needs.combine_reports.result == 'success' }} +# steps: +# - name: Set return code +# run: | +# success_status=${{ needs.combine_reports.outputs.success_status }} +# echo "Status: $success_status" +# if [[ $success_status == "Succeeded" ]]; then +# exit 0 +# else +# exit 1 +# fi +# +# add_comment_results: +# name: Add a comment with the results +# runs-on: ubuntu-22.04 +# needs: +# - combine_reports +# if: ${{ always() && needs.combine_reports.result == 'success' }} +# +# steps: +# - name: Checkout +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Download report from last on-schedule regression test +# run: | +# # Get ID of last on-schedule workflow +# SCHEDULE_ID=$(curl -X GET -s -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -H "Accept: application/vnd.github.v3+json" \ +# "https://api.github.com/repos/${{ github.repository }}/actions/workflows" \ +# | jq -r '.workflows[] | select(.name == "CI - Model Regression on schedule") | select(.path | test("schedule")) | .id') +# +# ARTIFACT_URL=$(curl -s -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -H "Accept: application/vnd.github.v3+json" \ +# "https://api.github.com/repos/${{ github.repository }}/actions/workflows/${SCHEDULE_ID}/runs?event=schedule&status=completed&branch=main&per_page=1" | jq -r .workflow_runs[0].artifacts_url) +# +# DOWNLOAD_URL=$(curl -s -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -H "Accept: application/vnd.github.v3+json" "${ARTIFACT_URL}" \ +# | jq -r '.artifacts[] | select(.name == "report.json") | .archive_download_url') +# +# # Download the artifact +# curl -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' -LJO -H "Accept: application/vnd.github.v3+json" $DOWNLOAD_URL +# +# # Unzip and change name +# unzip report.json.zip && mv report.json report_main.json +# +# - name: Download the report +# uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a +# with: +# name: report.json +# +# - name: Download gomplate +# run: |- +# sudo curl -o /usr/local/bin/gomplate -sSL https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_linux-amd64 +# sudo chmod +x /usr/local/bin/gomplate +# +# - name: Render a comment to add +# id: get_results +# run: | +# OUTPUT="$(gomplate -d data=report.json -d results_main=report_main.json -f .github/templates/model_regression_test_results.tmpl)" +# OUTPUT="${OUTPUT//$'\n'/'%0A'}" +# OUTPUT="${OUTPUT//$'\r'/'%0D'}" +# echo "result=$OUTPUT" >> $GITHUB_OUTPUT +# +# # Get time of current commit as start time +# TIME_ISO_COMMIT=$(gomplate -d github=https://api.github.com/repos/rasaHQ/rasa/commits/${{ github.sha }} -H 'github=Authorization:token ${{ secrets.GITHUB_TOKEN }}' -i '{{ (ds "github").commit.author.date }}') # Example "2022-02-17T14:06:38Z" +# TIME_UNIX_COMMIT=$(date -d "${TIME_ISO_COMMIT}" +%s%3N) # Example: "1645106798" +# +# # Get current time +# TIME_ISO_NOW=$(gomplate -i '{{ (time.Now).UTC.Format time.RFC3339}}') # Example: "2022-02-17T14:50:54Z%" +# TIME_UNIX_NOW=$(date -d "${TIME_ISO_NOW}" +%s%3N) # Example: "1645118083" +# +# echo "from_ts=$TIME_UNIX_COMMIT" >> $GITHUB_OUTPUT +# echo "to_ts=$TIME_UNIX_NOW" >> $GITHUB_OUTPUT +# +# - name: Publish results as a PR comment +# uses: marocchino/sticky-pull-request-comment@f61b6cf21ef2fcc468f4345cdfcc9bda741d2343 # v2.6.2 +# if: ${{ always() }} +# with: +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +# header: ${{ github.run_id }} +# append: true +# message: |- +# Status of the run: ${{ 
needs.combine_reports.outputs.success_status }} +# +# Commit: ${{ github.sha }}, [The full report is available as an artifact.](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) +# +# [Datadog dashboard link](https://app.datadoghq.eu/dashboard/mf4-2hu-x84?tpl_var_branch_baseline=${{ github.head_ref }}&from_ts=${{ steps.get_results.outputs.from_ts }}&to_ts=${{ steps.get_results.outputs.to_ts }}&live=false) +# +# ${{ steps.get_results.outputs.result }} +# +# - name: Remove 'status:model-regression-tests' label +# continue-on-error: true +# uses: buildsville/add-remove-label@6008d7bd99d3baeb7c04033584e68f8ec80b198b # v1.0 +# with: +# token: ${{secrets.GITHUB_TOKEN}} +# label: "status:model-regression-tests" +# type: remove +# +# - name: Remove 'runner:gpu' label +# continue-on-error: true +# uses: buildsville/add-remove-label@6008d7bd99d3baeb7c04033584e68f8ec80b198b # v1.0 +# with: +# token: ${{secrets.GITHUB_TOKEN}} +# label: "runner:gpu" +# type: remove +# +# remove_runner_gpu: +# name: Delete Github Runner - GPU +# needs: +# - deploy_runner_gpu +# - model_regression_test_gpu +# runs-on: ubuntu-22.04 +# if: ${{ always() && needs.deploy_runner_gpu.result != 'skipped' && contains(github.event.pull_request.labels.*.name, 'runner:gpu') && contains(github.event.pull_request.labels.*.name, 'status:model-regression-tests') }} +# +# steps: +# # Setup gcloud auth +# - uses: google-github-actions/auth@e8df18b60c5dd38ba618c121b779307266153fbf +# with: +# service_account: ${{ secrets.GKE_RASA_CI_GPU_SA_NAME_RASA_CI_CD }} +# credentials_json: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} +# +# # Get the GKE credentials for the cluster +# - name: Get GKE Cluster Credentials +# uses: google-github-actions/get-gke-credentials@894c221960ab1bc16a69902f29f090638cca753f +# with: +# cluster_name: ${{ secrets.GKE_GPU_CLUSTER_RASA_CI_CD }} +# location: ${{ env.GKE_ZONE }} +# project_id: ${{ secrets.GKE_SA_RASA_CI_GPU_PROJECT_RASA_CI_CD }} +# +# - name: Remove Github Runner +# run: kubectl -n github-runner delete deployments github-runner-${GITHUB_RUN_ID} --grace-period=30 diff --git a/.github/workflows/continous-integration.yml b/.github/workflows/continous-integration.yml index 587ad2ad26e4..9e9ff08f3cea 100644 --- a/.github/workflows/continous-integration.yml +++ b/.github/workflows/continous-integration.yml @@ -1,1218 +1,1218 @@ -name: Continuous Integration - -on: - push: - branches: - - main - tags: - - "*" - pull_request: - -concurrency: - group: continous-integration-${{ github.ref }} # branch or tag name - cancel-in-progress: true - -# SECRETS -# - GH_RELEASE_NOTES_TOKEN: personal access token of `rasabot` github account -# (login for account in 1pw) -# - SLACK_WEBHOOK_TOKEN: token to post to RasaHQ slack account (in 1password) -# - PYPI_TOKEN: publishing token for amn41 account, needs to be maintainer of -# RasaHQ/rasa on pypi (account credentials in 1password) -# - DOCKERHUB_PASSWORD: password for an account with write access to the rasa -# repo on hub.docker.com. used to pull and upload containers -# - RASA_OSS_TELEMETRY_WRITE_KEY: key to write to segment. Used to report telemetry. -# The key will be added to the distributions -# - RASA_OSS_EXCEPTION_WRITE_KEY: key to write to sentry. Used to report exceptions. -# The key will be added to the distributions. 
-#   Key can be found at https://sentry.io/settings/rasahq/projects/rasa-open-source/install/python/
-# - SENTRY_AUTH_TOKEN: authentication used to tell Sentry about any new releases
-#   created at https://sentry.io/settings/account/api/auth-tokens/
-
-env:
-  # needed to fix issues with boto during testing:
-  # https://github.com/travis-ci/travis-ci/issues/7940
-  BOTO_CONFIG: /dev/null
-
-  IS_TAG_BUILD: ${{ startsWith(github.event.ref, 'refs/tags') }}
-  DOCKERHUB_USERNAME: tmbo
-  DEFAULT_PYTHON_VERSION: "3.10"
-
-  # for wait_for_xx jobs
-  WAIT_TIMEOUT_SECS: 3000
-  WAIT_INTERVAL_SECS: 60
-
-jobs:
-  changes:
-    name: Check for file changes
-    runs-on: ubuntu-22.04
-    outputs:
-      # Each of the outputs below is a string, but only one of the two sources
-      # (changed-files or run-all) exists at any given time
-      backend: ${{ steps.changed-files.outputs.backend || steps.run-all.outputs.backend }}
-      docker: ${{ steps.changed-files.outputs.docker || steps.run-all.outputs.docker }}
-      docs: ${{ steps.changed-files.outputs.docs || steps.run-all.outputs.docs }}
-      is_pre_release_version: ${{ steps.rasa_check_version_type.outputs.is_pre_release_version }}
-    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
-        # Run the normal filters if the all-tests-required label is not set
-        id: changed-files
-        if: contains(github.event.pull_request.labels.*.name, 'status:all-tests-required') == false && github.event_name == 'pull_request'
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          filters: .github/change_filters.yml
-      - name: Set all filters to true if all tests are required
-        # Set all filters to true if the all-tests-required label is set or if we are not in a PR
-        # Bypasses all the change filters in change_filters.yml and forces all outputs to true
-        id: run-all
-        if: contains(github.event.pull_request.labels.*.name, 'status:all-tests-required') || github.event_name != 'pull_request'
-        run: |
-          echo "backend=true" >> $GITHUB_OUTPUT
-          echo "docker=true" >> $GITHUB_OUTPUT
-          echo "docs=true" >> $GITHUB_OUTPUT
-
-      - name: Check if tag version is a pre-release version
-        id: rasa_check_version_type
-        if: env.IS_TAG_BUILD == 'true'
-        run: |
-          # Get current tagged Rasa version
-          CURRENT_TAG=${GITHUB_REF#refs/tags/}
-          if [[ "$CURRENT_TAG" =~ ^[0-9.]+$ ]]; then
-            echo "is_pre_release_version=false" >> $GITHUB_OUTPUT
-          else
-            echo "is_pre_release_version=true" >> $GITHUB_OUTPUT
-          fi
-
-  wait_for_docs_tests:
-    # Looks for the doc tests workflow and waits for it to complete successfully
-    # Runs on pushes to main exclusively
-    name: Wait for docs tests
-    if: github.ref_type != 'tag'
-    runs-on: ubuntu-22.04
-    needs: [changes]
-
-    steps:
-      - name: Wait for doc tests
-        uses: fountainhead/action-wait-for-check@297be350cf8393728ea4d4b39435c7d7ae167c93
-        id: wait-for-doc-tests
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          checkName: Test Documentation
-          ref: ${{ github.event.pull_request.head.sha || github.sha }}
-          timeoutSeconds: ${{ env.WAIT_TIMEOUT_SECS }}
-          intervalSeconds: ${{ env.WAIT_INTERVAL_SECS }}
-
-      - name: Fail the step if the doc tests run could not be found
-        if: ${{ steps.wait-for-doc-tests.outputs.conclusion == 'timed_out' }}
-        run: |
-          echo "Could not find the doc tests run."
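-          # The wait action reports `conclusion: timed_out` when timeoutSeconds elapses
-          # before the Test Documentation check is found, so only the "not found" case
-          # reaches this step; the non-zero exit below then fails the job.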
- exit 1 - - quality: - name: Code Quality - if: github.ref_type != 'tag' - runs-on: ubuntu-22.04 - needs: [changes] - - steps: - - name: Checkout git repository πŸ• - if: needs.changes.outputs.backend == 'true' - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 - if: needs.changes.outputs.backend == 'true' - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - - - name: Read Poetry Version πŸ”’ - if: needs.changes.outputs.backend == 'true' - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - if: needs.changes.outputs.backend == 'true' - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Inject setuptools into poetry's runtime environment - if: needs.changes.outputs.backend == 'true' - run: | - poetry self add setuptools - - - name: Load Poetry Cached Libraries ⬇ - id: cache-poetry - if: needs.changes.outputs.backend == 'true' - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - with: - path: .venv - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} - restore-keys: ${{ runner.os }}-poetry-${{ env.DEFAULT_PYTHON_VERSION }} - - - name: Clear Poetry cache - if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') - run: rm -r .venv - - - name: Create virtual environment - if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' - run: python -m venv create .venv - - - name: Set up virtual environment - if: needs.changes.outputs.backend == 'true' - run: poetry config virtualenvs.in-project true - - - name: Install Dependencies πŸ“¦ - if: needs.changes.outputs.backend == 'true' - # Poetry intermittently fails to install dependency if it is not PEP 517 compliant - # This is a workaround for that issue - run: | - sudo apt-get -y install libpq-dev - make install-full - - - name: Checkout target branch to be able to diff - if: needs.changes.outputs.backend == 'true' && github.event_name == 'pull_request' - run: | - git fetch origin ${{ github.base_ref }} - echo "DOCSTRING_DIFF_BRANCH=origin/${{ github.base_ref }}" >> $GITHUB_ENV - - # Fetch entire history for current branch so that `make lint-docstrings` - # can calculate the proper diff between the branches - git fetch --unshallow origin "${{ github.ref }}" - - - name: Add github workflow problem matchers - if: needs.changes.outputs.backend == 'true' - run: | - echo "::add-matcher::.github/matchers/flake8-error-matcher.json" - - - name: Lint Code 🎎 - if: needs.changes.outputs.backend == 'true' - run: | - # If it's not a pull request, $DOCSTRING_DIFF_BRANCH is unset. 
-          # This will result in an empty diff, which effectively means that
-          # make lint-docstrings will be skipped for other events than `pull_request`
-          make lint BRANCH=$DOCSTRING_DIFF_BRANCH
-
-      - name: Check Types 📚
-        if: needs.changes.outputs.backend == 'true'
-        run: make types
-
-      - name: Lint Changelog Filenames 📝
-        if: needs.changes.outputs.backend == 'true' && github.event_name == 'pull_request'
-        run: make lint-changelog
-
-      - name: Test CLI 🖥
-        if: needs.changes.outputs.backend == 'true'
-        # Makes sure we catch any dependency errors early. They will create strange
-        # errors during the docs build, so easier to catch them early on by
-        # trying to run the `rasa` command once before the docs build.
-        run: poetry run rasa --help
-
-  changelog:
-    name: Check for changelog
-    runs-on: ubuntu-22.04
-
-    steps:
-      - name: Checkout git repository 🐕
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-
-      - name: Assert release includes all changelog entries
-        # check the changelog folder only when we create a pull request preparing a release
-        if: github.event_name == 'pull_request' && startsWith(github.head_ref, 'prepare-release') && needs.changes.outputs.is_pre_release_version == 'false'
-        working-directory: changelog
-        run: |
-          # List all unexpected files in changelog/
-          UNEXPECTED_FILES=$(ls -A --ignore={"README.md",".gitignore","_template.md.jinja2"})
-
-          # Exit with an error if any unexpected files are found
-          [[ "$UNEXPECTED_FILES" ]] && \
-          echo "Found the following unexpected files in changelog/" && \
-          echo "$UNEXPECTED_FILES" && \
-          exit 1 || \
-          echo "Release includes all changelog entries."
-
-  test:
-    name: Run Tests
-    if: github.ref_type != 'tag'
-    runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
-    needs: [changes]
-    strategy:
-      fail-fast: false
-      matrix:
-        test:
-          - test-cli
-          - test-core-featurizers
-          - test-policies
-          - test-nlu-featurizers
-          - test-nlu-predictors
-          - test-full-model-training
-          - test-other-unit-tests
-          - test-performance
-        os: [ubuntu-22.04, windows-2019]
-        python-version: [3.8, 3.9, "3.10"]
-
-    steps:
-      - name: Run DataDog Agent
-        if: needs.changes.outputs.backend == 'true' && (matrix.os != 'windows-2019' || contains(github.event.pull_request.labels.*.name, 'tools:datadog-windows'))
-        run: |
-          docker run --name dd_agent -p 8126:8126 -d -e "DD_API_KEY=${{ secrets.DD_API_KEY }}" -e "DD_INSIDE_CI=true" -e "DD_HOSTNAME=none" -e "DD_SITE=datadoghq.eu" -e GITHUB_ACTIONS=true -e CI=true datadog/agent:latest
-          docker ps --all --filter name=dd_agent --filter status=running --no-trunc --format "{{.ID}} {{.Status}}"
-          docker port dd_agent
-
-      - name: Checkout git repository 🐕
-        if: needs.changes.outputs.backend == 'true'
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-
-      - name: Set up Python ${{ matrix.python-version }} 🐍
-        if: needs.changes.outputs.backend == 'true'
-        uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Read Poetry Version 🔒
-        if: needs.changes.outputs.backend == 'true'
-        run: |
-          echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
-        shell: bash
-
-      - name: Install poetry 🦄
-        if: needs.changes.outputs.backend == 'true'
-        uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
-        with:
-          poetry-version: ${{ env.POETRY_VERSION }}
-
-      - name: Inject setuptools into poetry's runtime environment
-        if: needs.changes.outputs.backend == 'true'
-        run: |
-          poetry self add setuptools
-
-      - name: Prevent race condition
in poetry build - # More context about race condition during poetry build can be found here: - # https://github.com/python-poetry/poetry/issues/7611#issuecomment-1747836233 - if: needs.changes.outputs.backend == 'true' - run: | - poetry config installer.max-workers 1 - - - name: Load Poetry Cached Libraries ⬇ - id: cache-poetry - if: needs.changes.outputs.backend == 'true' - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - with: - path: .venv - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} - - - name: Clear Poetry cache - if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') - run: rm -r .venv - - # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. - # The virtualenv simply doesn't work on Windows with our setup, - # that's why we use venv to create virtual environment - - name: Create virtual environment - if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' - run: python -m venv create .venv - - - name: Set up virtual environment - if: needs.changes.outputs.backend == 'true' - # Poetry on Windows cannot pick up the virtual environments directory properly, - # and it creates a new one every time the pipeline runs. - # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside - # the project itself, which also makes it easier for us to determine the correct directory - # that needs to be cached. - run: poetry config virtualenvs.in-project true - - - name: Install Dependencies (Linux) πŸ“¦ - if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - # Poetry intermittently fails to install dependency if it is not PEP 517 compliant - # This is a workaround for that issue - run: | - sudo apt-get -y install libpq-dev - make install-full | tee .output - if grep 'The lock file is not up to date' .output; then exit 1; fi - make prepare-tests-ubuntu - - - name: Install Dependencies (Windows) πŸ“¦ - if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' - # Restoring cache doesn't work properly on Windows due to symlinks. - # We create symlinks for spacy models, that's why we need to clean them up - # before caching the dependencies directory. 
- # More information: https://github.com/actions/cache/issues/120 - # Poetry intermittently fails to install dependency if it is not PEP 517 compliant - # This is a workaround for that issue - run: | - $spacy_data_dir = ".venv\lib\site-packages\spacy\data" - if (Test-Path $spacy_data_dir) { - Get-ChildItem -Force -ErrorAction Stop $spacy_data_dir | Where-Object { if($_.Attributes -match "ReparsePoint"){$_.Delete()} } - Remove-Item -Force -Recurse $spacy_data_dir - New-Item -Path $spacy_data_dir -Type Directory - } - make install-full - make prepare-tests-windows-gha - - - name: Add github workflow problem matchers - if: needs.changes.outputs.backend == 'true' && matrix.python-version == 3.7 && matrix.os == 'ubuntu-22.04' - # only annotate based on test runs on ubuntu: otherwise - # all errors will be duplicated for each python / os combination - # therefore, we only enable for the one where most tests are run - # (tests will still run in other envs, they will just not create annotations) - run: pip install pytest-github-actions-annotate-failures - - - name: Disable "LongPathsEnabled" option on Windows - if: matrix.os == 'windows-2019' - # On Windows laptops, a default preset prevents path names from being longer than - # 260 characters. Some of our users can't enable this setting due to company policies. - # We implemented a fix for model storage. The Windows container in GitHub - # comes with the setting enabled, so we disable it here in order to ensure our tests - # are running in an environment where long path names are prevented. - run: | - (Get-ItemProperty "HKLM:System\CurrentControlSet\Control\FileSystem").LongPathsEnabled - Set-ItemProperty 'HKLM:\System\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -value 0 - - - name: Install ddtrace on Linux - if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - run: poetry run pip install -U 'ddtrace<2.0.0' - - - name: Install ddtrace on Windows - if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' - run: | - .\.venv\Scripts\activate - py -m pip install -U 'ddtrace<2.0.0' - - - name: Test Code πŸ” (multi-process) - if: needs.changes.outputs.backend == 'true' - env: - JOBS: 2 - PYTHONIOENCODING: "utf-8" - DD_ENV: ${{ matrix.test }} - DD_SERVICE: rasa - DD_ARGS: --ddtrace --ddtrace-patch-all - run: | - make ${{ matrix.test }} - if [[ "${{ matrix.os }}" != "windows-2019" ]]; then - mv .coverage ${{ github.workspace }}/${{ matrix.test }}-coverage - fi - shell: bash # bash shell is a way to make code run for both Linux and Windows - - - name: Store coverage reports - if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce - with: - name: ${{ matrix.test }}-coverage - path: | - ${{ github.workspace }}/${{ matrix.test }}-coverage - - test-flaky: - name: Run Flaky Tests - if: github.ref_type != 'tag' - runs-on: ${{ matrix.os }} - timeout-minutes: 60 - needs: [changes] - strategy: - fail-fast: false - matrix: - os: [ubuntu-22.04, windows-2019] - python-version: [3.8, 3.9, "3.10"] - - steps: - - name: Run DataDog Agent - if: needs.changes.outputs.backend == 'true' && (matrix.os != 'windows-2019' || contains(github.event.pull_request.labels.*.name, 'tools:datadog-windows')) - run: | - docker run --name dd_agent -p 8126:8126 -d -e "DD_API_KEY=${{ secrets.DD_API_KEY }}" -e "DD_INSIDE_CI=true" -e "DD_HOSTNAME=none" -e "DD_SITE=datadoghq.eu" -e GITHUB_ACTIONS=true -e CI=true datadog/agent:latest - docker 
ps --all --filter name=dd_agent --filter status=running --no-trunc --format "{{.ID}} {{.Status}}" - docker port dd_agent - - - name: Checkout git repository πŸ• - if: needs.changes.outputs.backend == 'true' - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Set up Python ${{ matrix.python-version }} 🐍 - if: needs.changes.outputs.backend == 'true' - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: ${{ matrix.python-version }} - - - name: Read Poetry Version πŸ”’ - if: needs.changes.outputs.backend == 'true' - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - if: needs.changes.outputs.backend == 'true' - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Inject setuptools into poetry's runtime environment - if: needs.changes.outputs.backend == 'true' - run: | - poetry self add setuptools - - - name: Load Poetry Cached Libraries ⬇ - id: cache-poetry - if: needs.changes.outputs.backend == 'true' - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - with: - path: .venv - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} - - - name: Clear Poetry cache - if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') - run: rm -r .venv - - # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. - # The virtualenv simply doesn't work on Windows with our setup, - # that's why we use venv to create virtual environment - - name: Create virtual environment - if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' - run: python -m venv create .venv - - - name: Set up virtual environment - if: needs.changes.outputs.backend == 'true' - # Poetry on Windows cannot pick up the virtual environments directory properly, - # and it creates a new one every time the pipeline runs. - # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside - # the project itself, which also makes it easier for us to determine the correct directory - # that needs to be cached. - run: poetry config virtualenvs.in-project true - - - name: Install Dependencies (Linux) πŸ“¦ - if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - run: | - sudo apt-get -y install libpq-dev - make install-full | tee .output - if grep 'The lock file is not up to date' .output; then exit 1; fi - make prepare-tests-ubuntu - - - name: Install Dependencies (Windows) πŸ“¦ - if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' - # Restoring cache doesn't work properly on Windows due to symlinks. - # We create symlinks for spacy models, that's why we need to clean them up - # before caching the dependencies' directory. 
- # More information: https://github.com/actions/cache/issues/120 - run: | - $spacy_data_dir = ".venv\lib\site-packages\spacy\data" - if (Test-Path $spacy_data_dir) { - Get-ChildItem -Force -ErrorAction Stop $spacy_data_dir | Where-Object { if($_.Attributes -match "ReparsePoint"){$_.Delete()} } - Remove-Item -Force -Recurse $spacy_data_dir - New-Item -Path $spacy_data_dir -Type Directory - } - make install-full - make prepare-tests-windows-gha - - - name: Add github workflow problem matchers - if: needs.changes.outputs.backend == 'true' && matrix.python-version == 3.7 && matrix.os == 'ubuntu-22.04' - # only annotate based on test runs on ubuntu: otherwise - # all errors will be duplicated for each python / os combination - # therefore, we only enable for the one where most tests are run - # (tests will still run in other envs, they will just not create annotations) - run: pip install pytest-github-actions-annotate-failures - - - name: Disable "LongPathsEnabled" option on Windows - if: matrix.os == 'windows-2019' - # On Windows laptops, a default preset prevents path names from being longer than - # 260 characters. Some of our users can't enable this setting due to company policies. - # We implemented a fix for model storage. The Windows container in GitHub - # comes with the setting enabled, so we disable it here in order to ensure our tests - # are running in an environment where long path names are prevented. - run: | - (Get-ItemProperty "HKLM:System\CurrentControlSet\Control\FileSystem").LongPathsEnabled - Set-ItemProperty 'HKLM:\System\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -value 0 - - - name: Install ddtrace on Linux - if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - run: poetry run pip install -U 'ddtrace<2.0.0' - - - name: Install ddtrace on Windows - if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' - run: | - .\.venv\Scripts\activate - py -m pip install -U 'ddtrace<2.0.0' - - - name: Test Code πŸ” (multi-process) - if: needs.changes.outputs.backend == 'true' - env: - JOBS: 2 - PYTHONIOENCODING: "utf-8" - DD_ENV: test-flaky - DD_SERVICE: rasa - DD_ARGS: --ddtrace --ddtrace-patch-all - run: | - make test-flaky - if [[ "${{ matrix.os }}" != "windows-2019" ]]; then - mv .coverage ${{ github.workspace }}/test-flaky-coverage - fi - shell: bash # bash shell is a way to make code run for both Linux and Windows - - - name: Store coverage reports - if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce - with: - name: ${{ matrix.test }}-coverage - path: | - ${{ github.workspace }}/${{ matrix.test }}-coverage - - upload_coverage_reports: - name: Upload coverage reports to codeclimate - if: github.ref_type != 'tag' - runs-on: ubuntu-22.04 - # Always upload results even if tests failed - needs: - - test - - changes - - steps: - - name: Checkout git repository πŸ• - if: needs.changes.outputs.backend == 'true' - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Set up Python 3.10 🐍 - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: "3.10" - - - name: Get backend coverage reports - if: needs.changes.outputs.backend == 'true' - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - with: - path: ${{ github.workspace }}/tests_coverage - - - name: Merge all reports - if: needs.changes.outputs.backend == 'true' - run: | - subs=`ls ${{ 
github.workspace }}/tests_coverage` - download_dir="${{ github.workspace }}/tests_coverage" - final_dir="${{ github.workspace }}/tests_coverage/final" - - # Downloaded artifacts go into folders, gotta extract them all into one folder for upload - mkdir "${final_dir}/" - for i in $subs; do - mv "${download_dir}/$i"/* "${final_dir}/" - done - - pip install coverage - coverage combine "${final_dir}/"* - coverage xml - - - name: Upload reports to codeclimate - if: needs.changes.outputs.backend == 'true' - uses: paambaati/codeclimate-action@b649ad206d2e83dafb9ed130deba698aa1b41d78 - env: - CC_TEST_REPORTER_ID: ${{ secrets.CODECLIMATE_REPORTER_ID }} - with: - coverageLocations: | - ${{ github.workspace }}/coverage.xml:coverage.py - debug: true - - integration_test: - name: Run Non-Sequential Integration Tests - if: github.ref_type != 'tag' - runs-on: ubuntu-22.04 - timeout-minutes: 60 - needs: [changes] - env: - REDIS_HOST: localhost - REDIS_PORT: 6379 - POSTGRES_HOST: localhost - POSTGRES_PORT: 5432 - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - RABBITMQ_HOST: localhost - RABBITMQ_PORT: 5672 - RABBITMQ_USER: guest - RABBITMQ_PASSWORD: guest - - services: - redis: - image: redis:6 - # Set health checks to wait until redis has started - options: >- - --health-cmd "redis-cli ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - # FIXME: cannot use ${{ env.REDIS_PORT }} here - # mapping container ports to the host - - 6379:6379 - - postgres: - image: postgres:13 - # Set health checks to wait until postgres has started - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - env: - # postgres image requires password to be set - POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} - ports: - # FIXME: cannot use ${{ env.POSTGRES_PORT }} here - # mapping container ports to the host - - 5432:5432 - - rabbitmq: - # see https://github.com/docker-library/healthcheck/blob/master/rabbitmq/docker-healthcheck - image: healthcheck/rabbitmq - ports: - - 5672:5672 - - mongodb: - image: mongodb/mongodb-community-server:6.0.4-ubuntu2204 - options: >- - --health-cmd "echo 'db.runCommand("ping").ok' | mongosh --quiet" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 27017:27017 - - steps: - - name: Checkout git repository πŸ• - if: needs.changes.outputs.backend == 'true' - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 - if: needs.changes.outputs.backend == 'true' - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - - - name: Read Poetry Version πŸ”’ - if: needs.changes.outputs.backend == 'true' - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - if: needs.changes.outputs.backend == 'true' - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Load Poetry Cached Libraries ⬇ - id: cache-poetry - if: needs.changes.outputs.backend == 'true' - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 - with: - path: .venv - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} - - - name: Clear Poetry cache - if: steps.cache-poetry.outputs.cache-hit == 'true' 
&& needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') - run: rm -r .venv - - # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. - # The virtualenv simply doesn't work on Windows with our setup, - # that's why we use venv to create virtual environment - - name: Create virtual environment - if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' - run: python -m venv create .venv - - - name: Set up virtual environment - if: needs.changes.outputs.backend == 'true' - # Poetry on Windows cannot pick up the virtual environments directory properly, - # and it creates a new one every time the pipeline runs. - # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside - # the project itself, which also makes it easier for us to determine the correct directory - # that needs to be cached. - run: poetry config virtualenvs.in-project true - - - name: Install Dependencies (Linux) πŸ“¦ - if: needs.changes.outputs.backend == 'true' - run: | - sudo apt-get -y install libpq-dev - make install-full | tee .output - if grep 'The lock file is not up to date' .output; then exit 1; fi - make prepare-tests-ubuntu - - - name: Run kafka and zookeeper containers for integration testing - if: needs.changes.outputs.backend == 'true' - run: | - docker-compose -f tests_deployment/docker-compose.kafka.yml up -d - - - name: Test Code with Services 🩺 - if: needs.changes.outputs.backend == 'true' - env: - JOBS: 2 - INTEGRATION_TEST_PYTEST_MARKERS: '"not sequential"' - PYTHONIOENCODING: "utf-8" - run: | - make test-integration - - sequential_integration_test: - name: Run Sequential Integration Tests - if: github.ref_type != 'tag' - runs-on: ubuntu-20.04 - timeout-minutes: 60 - needs: [changes] - env: - POSTGRES_HOST: localhost - POSTGRES_PORT: 5432 - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - - services: - postgres: - image: postgres:13 - # Set health checks to wait until postgres has started - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - env: - # postgres image requires password to be set - POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} - ports: - # FIXME: cannot use ${{ env.POSTGRES_PORT }} here - # mapping container ports to the host - - 5432:5432 - - steps: - - name: Checkout git repository πŸ• - if: needs.changes.outputs.backend == 'true' - uses: actions/checkout@v3 - - - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 - if: needs.changes.outputs.backend == 'true' - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - - - name: Read Poetry Version πŸ”’ - if: needs.changes.outputs.backend == 'true' - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - if: needs.changes.outputs.backend == 'true' - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Load Poetry Cached Libraries ⬇ - id: cache-poetry - if: needs.changes.outputs.backend == 'true' - uses: actions/cache@v3 - with: - path: .venv - key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION 
}}-${{ env.pythonLocation }} - - - name: Clear Poetry cache - if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') - run: rm -r .venv - - # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. - # The virtualenv simply doesn't work on Windows with our setup, - # that's why we use venv to create virtual environment - - name: Create virtual environment - if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' - run: python -m venv create .venv - - - name: Set up virtual environment - if: needs.changes.outputs.backend == 'true' - # Poetry on Windows cannot pick up the virtual environments directory properly, - # and it creates a new one every time the pipeline runs. - # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside - # the project itself, which also makes it easier for us to determine the correct directory - # that needs to be cached. - run: poetry config virtualenvs.in-project true - - - name: Install Dependencies (Linux) πŸ“¦ - if: needs.changes.outputs.backend == 'true' - run: | - sudo apt-get -y install libpq-dev - make install-full | tee .output - if grep 'The lock file is not up to date' .output; then exit 1; fi - make prepare-tests-ubuntu - - # these integration tests need to be ran in a sequential fashion, - # due to environment constraints, so we're running them in a single process. - - name: Test Code with Services 🩺 (sequential) - if: needs.changes.outputs.backend == 'true' - env: - JOBS: 1 - INTEGRATION_TEST_PYTEST_MARKERS: "sequential" - PYTHONIOENCODING: "utf-8" - run: | - make test-integration - - - name: Stop kafka and zookeeper containers for integration testing - if: needs.changes.outputs.backend == 'true' - run: | - docker-compose -f tests_deployment/docker-compose.kafka.yml down - - build_docker_base_images_and_set_env: - name: Build Docker base images and setup environment - runs-on: ubuntu-22.04 - outputs: - base_image_hash: ${{ steps.check_image.outputs.base_image_hash }} - base_mitie_image_hash: ${{ steps.check_image.outputs.base_mitie_image_hash }} - base_builder_image_hash: ${{ steps.check_image.outputs.base_builder_image_hash }} - # Tag name used for images created during Docker image builds, e.g. 3886 - a PR number - image_tag: ${{ steps.set_output.outputs.image_tag }} - # Return 'true' if tag version is equal or higher than the latest tagged Rasa version - is_newest_version: ${{ steps.rasa_get_version.outputs.is_newest_version }} - - steps: - # Due to an issue with checking out a wrong commit, we make sure - # to checkout HEAD commit for a pull request. 
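# NOTE: on pull_request events, actions/checkout resolves to the ephemeral
# merge commit by default, not the branch head, which is why the step below
# pins `ref` to the PR head SHA. A minimal sketch of a guard step that could
# assert the right commit was checked out (the step itself is hypothetical
# and not part of this workflow; the SHA expression is the one used below):
#
#   - name: Verify checked-out commit
#     if: github.event_name == 'pull_request'
#     run: test "$(git rev-parse HEAD)" = "${{ github.event.pull_request.head.sha }}"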
- # More details: https://github.com/actions/checkout/issues/299 - - name: Checkout pull request HEAD commit instead of merge commit πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - if: github.event_name == 'pull_request' - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Checkout git repository πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - if: github.event_name != 'pull_request' - - - name: Set up QEMU - uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # v2.7.0 - - - name: Read Poetry Version πŸ”’ - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Login to DockerHub Registry πŸ”’ - run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ env.DOCKERHUB_USERNAME }} --password-stdin || true - - - name: Check if tag version is equal or higher than the latest tagged Rasa version - id: rasa_get_version - if: env.IS_TAG_BUILD == 'true' - run: | - # Get latest tagged Rasa version - git fetch --depth=1 origin "+refs/tags/*:refs/tags/*" - # Fetch branch history - git fetch --prune --unshallow - LATEST_TAGGED_NON_ALPHA_RASA_VERSION=$(git tag | sort -r -V | grep -E "^[0-9.]+$" | head -n1) - CURRENT_TAG=${GITHUB_REF#refs/tags/} - # Return 'true' if tag version is equal or higher than the latest tagged Rasa version - IS_NEWEST_VERSION=$((printf '%s\n%s\n' "${LATEST_TAGGED_NON_ALPHA_RASA_VERSION}" "$CURRENT_TAG" \ - | sort -V -C && echo true || echo false) || true) - # Avoid that the script gets released for alphas or release candidates - if [[ "${IS_NEWEST_VERSION}" == "true" && "$CURRENT_TAG" =~ ^[0-9.]+$ ]]; then - echo "is_newest_version=true" >> $GITHUB_OUTPUT - else - echo "is_newest_version=false" >> $GITHUB_OUTPUT - fi - - - name: Check if a base image exists - id: check_image - env: - DOCKER_CLI_EXPERIMENTAL: enabled - run: | - # Base image - BASE_IMAGE_HASH=${{ hashFiles('docker/Dockerfile.base') }} - echo "base_image_hash=${BASE_IMAGE_HASH}" >> $GITHUB_OUTPUT - - BASE_IMAGE_EXISTS=$((docker manifest inspect rasa/rasa:base-${BASE_IMAGE_HASH} &> /dev/null && echo true || echo false) || true) - echo "base_exists=${BASE_IMAGE_EXISTS}" >> $GITHUB_OUTPUT - - # Base MITIE image - BASE_MITIE_IMAGE_HASH=${{ hashFiles('docker/Dockerfile.base-mitie') }} - MAKEFILE_MITIE_HASH=${{ hashFiles('Makefile') }} - echo "base_mitie_image_hash=${BASE_MITIE_IMAGE_HASH:0:50}-${MAKEFILE_MITIE_HASH:0:50}" >> $GITHUB_OUTPUT - - BASE_IMAGE_MITIE_EXISTS=$((docker manifest inspect rasa/rasa:base-mitie-${BASE_MITIE_IMAGE_HASH:0:50}-${MAKEFILE_MITIE_HASH:0:50} &> /dev/null && echo true || echo false) || true) - echo "base_mitie_exists=${BASE_IMAGE_MITIE_EXISTS}" >> $GITHUB_OUTPUT - - # Base poetry image - BASE_IMAGE_POETRY_EXISTS=$((docker manifest inspect rasa/rasa:base-poetry-${{ env.POETRY_VERSION }} &> /dev/null && echo true || echo false) || true) - echo "base_poetry_exists=${BASE_IMAGE_POETRY_EXISTS}" >> $GITHUB_OUTPUT - - # Base builder image - BASE_IMAGE_BUILDER_HASH=${{ hashFiles('docker/Dockerfile.base-builder') }}-poetry-${{ env.POETRY_VERSION }} - echo "base_builder_image_hash=${BASE_IMAGE_BUILDER_HASH}" >> $GITHUB_OUTPUT - - BASE_IMAGE_BUILDER_EXISTS=$((docker manifest inspect rasa/rasa:base-builder-${BASE_IMAGE_BUILDER_HASH} &> /dev/null && echo true || echo false) || true) - echo "base_builder_exists=${BASE_IMAGE_BUILDER_EXISTS}" >> 
$GITHUB_OUTPUT - - - name: Build Docker base image πŸ›  - if: steps.check_image.outputs.base_exists == 'false' || env.IS_TAG_BUILD == 'true' - run: | - export IMAGE_TAG=${{ steps.check_image.outputs.base_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base - - - name: Push Docker base image if it's not building from a fork ⬆ - if: (steps.check_image.outputs.base_exists == 'false' || env.IS_TAG_BUILD == 'true') && github.event.pull_request.head.repo.owner.login == 'RasaHQ' - run: | - export IMAGE_TAG=${{ steps.check_image.outputs.base_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base --push - - - name: Build Docker mitie base image πŸ›  - if: steps.check_image.outputs.base_mitie_exists == 'false' || steps.check_image.outputs.base_exists == 'false' - run: | - export IMAGE_TAG=${{ steps.check_image.outputs.base_mitie_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-mitie - - - name: Push Docker mitie base image if it's not building from a fork ⬆ - if: (steps.check_image.outputs.base_mitie_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ' - run: | - export IMAGE_TAG=${{ steps.check_image.outputs.base_mitie_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-mitie --push - - - name: Build Docker poetry base image πŸ›  - if: steps.check_image.outputs.base_poetry_exists == 'false' || steps.check_image.outputs.base_exists == 'false' - run: | - export IMAGE_TAG=${{ env.POETRY_VERSION }} - export BASE_IMAGE_HASH=${{ steps.check_image.outputs.base_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-poetry - - - name: Push Docker poetry base image if it's not building from a fork ⬆ - if: (steps.check_image.outputs.base_poetry_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ' - run: | - export IMAGE_TAG=${{ env.POETRY_VERSION }} - export BASE_IMAGE_HASH=${{ steps.check_image.outputs.base_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-poetry --push - - - name: Build Docker builder base image πŸ›  - if: steps.check_image.outputs.base_builder_exists == 'false' || steps.check_image.outputs.base_exists == 'false' - run: | - export IMAGE_TAG=${{ steps.check_image.outputs.base_builder_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-builder - - - name: Push Docker builder base image if it's not building from a fork ⬆ - if: (steps.check_image.outputs.base_builder_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ' - run: | - export IMAGE_TAG=${{ steps.check_image.outputs.base_builder_image_hash }} - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-builder --push - - # Set environment variables for a pull request - # - # In this scenario, we've created a PR #1234 - # - # Example output: - # IMAGE_TAG=1234 - - name: Set environment variables - pull_request - if: github.event_name == 'pull_request' && env.IS_TAG_BUILD == 'false' - run: | - echo "IMAGE_TAG=${{ github.event.number }}" >> $GITHUB_ENV - - # Set environment variables for a 
tag - # - # In this scenario, we've pushed the '2.0.6' tag - # - # Example output: - # TAG_NAME=2.0.6 - # IMAGE_TAG=2.0.6 - - name: Set environment variables - push - tag - if: github.event_name == 'push' && env.IS_TAG_BUILD == 'true' - run: | - TAG_NAME=${GITHUB_REF#refs/tags/} - echo "IMAGE_TAG=${TAG_NAME}" >> $GITHUB_ENV - - # Set environment variables for a branch - # - # In this scenario, we've pushed changes into the main branch - # - # Example output: - # IMAGE_TAG=main - - name: Set environment variables - push - branch - if: github.event_name == 'push' && env.IS_TAG_BUILD == 'false' - run: | - BRANCH_NAME=${GITHUB_REF#refs/heads/} - SAFE_BRANCH_NAME="$(echo ${GITHUB_REF#refs/heads/} | sed 's/[\\*+.$\#\-\/]/-/g')" - echo "IMAGE_TAG=${SAFE_BRANCH_NAME}" >> $GITHUB_ENV - - - name: Set output - id: set_output - run: | - echo "image_tag=${{ env.IMAGE_TAG }}" >> $GITHUB_OUTPUT - - docker: - name: Build Docker - runs-on: ubuntu-22.04 - needs: [changes, build_docker_base_images_and_set_env] - env: - IMAGE_TAG: ${{ needs.build_docker_base_images_and_set_env.outputs.image_tag }} - BASE_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_image_hash }} - BASE_MITIE_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_mitie_image_hash }} - BASE_BUILDER_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_builder_image_hash }} - - strategy: - matrix: - image: [default, full, mitie-en, spacy-de, spacy-it, spacy-en] - - steps: - # Due to an issue with checking out a wrong commit, we make sure - # to checkout HEAD commit for a pull request. - # More details: https://github.com/actions/checkout/issues/299 - - name: Checkout pull request HEAD commit instead of merge commit πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - if: github.event_name == 'pull_request' - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Checkout git repository πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - if: github.event_name != 'pull_request' - - - name: Set up QEMU - uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # v2.7.0 - - - name: Free disk space - if: needs.changes.outputs.docker == 'true' - # tries to make sure we do not run out of disk space, see - # https://github.community/t5/GitHub-Actions/BUG-Strange-quot-No-space-left-on-device-quot-IOExceptions-on/td-p/46101 - run: | - sudo swapoff -a - sudo rm -f /swapfile - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - sudo apt clean - docker image prune -a -f - docker volume prune -f - docker container prune -f - df -h - - - name: Read Poetry Version πŸ”’ - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Echo Available platforms - run: echo ${{ steps.buildx.outputs.platforms }} - - - name: Login to DockerHub Registry πŸ”’ - if: needs.changes.outputs.docker == 'true' - run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ env.DOCKERHUB_USERNAME }} --password-stdin || true - - - name: Copy Segment write key to the package - if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'RasaHQ/rasa' - env: - RASA_TELEMETRY_WRITE_KEY: ${{ secrets.RASA_OSS_TELEMETRY_WRITE_KEY }} - RASA_EXCEPTION_WRITE_KEY: ${{ secrets.RASA_OSS_EXCEPTION_WRITE_KEY }} - run: | - 
./scripts/write_keys_file.sh - - - name: Build Docker image - if: needs.changes.outputs.docker == 'true' - run: | - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} - - - name: Check how much space is left after Docker build - run: df -h - - - name: Push image with main tag πŸ“¦ - if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository == 'RasaHQ/rasa' - run: | - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} --push - - - name: Push image with ${{github.ref}} tag πŸ“¦ - if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && env.IS_TAG_BUILD == 'true' && github.repository == 'RasaHQ/rasa' - run: | - IS_NEWEST_VERSION=${{ needs.build_docker_base_images_and_set_env.outputs.is_newest_version }} - - docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} --push - -# # Tag the image as latest -# if [[ "${IS_NEWEST_VERSION}" == "true" ]]; then -# if [[ "${{ matrix.image }}" == "default" ]]; then -# RELEASE_TAG="${IMAGE_TAG}" -# else -# RELEASE_TAG="${IMAGE_TAG}-${{ matrix.image }}" -# fi -# -# LATEST_TAG=$(echo $RELEASE_TAG | sed 's/'$IMAGE_TAG'/latest/g') -# -# docker tag rasa/rasa:${RELEASE_TAG} rasa/rasa:${LATEST_TAG} -# docker push rasa/rasa:${LATEST_TAG} +#name: Continuous Integration +# +#on: +# push: +# branches: +# - main +# tags: +# - "*" +# pull_request: +# +#concurrency: +# group: continous-integration-${{ github.ref }} # branch or tag name +# cancel-in-progress: true +# +## SECRETS +## - GH_RELEASE_NOTES_TOKEN: personal access token of `rasabot` github account +## (login for account in 1pw) +## - SLACK_WEBHOOK_TOKEN: token to post to RasaHQ slack account (in 1password) +## - PYPI_TOKEN: publishing token for amn41 account, needs to be maintainer of +## RasaHQ/rasa on pypi (account credentials in 1password) +## - DOCKERHUB_PASSWORD: password for an account with write access to the rasa +## repo on hub.docker.com. used to pull and upload containers +## - RASA_OSS_TELEMETRY_WRITE_KEY: key to write to segment. Used to report telemetry. +## The key will be added to the distributions +## - RASA_OSS_EXCEPTION_WRITE_KEY: key to write to sentry. Used to report exceptions. +## The key will be added to the distributions. 
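# NOTE: both write keys are baked into the build by scripts/write_keys_file.sh,
# invoked in the `deploy` and `docker` jobs. The script's contents are not part
# of this diff; conceptually the step reduces to a sketch like the following,
# where the target file is a placeholder, not the script's real output path:
#
#   echo "SEGMENT_WRITE_KEY=${RASA_TELEMETRY_WRITE_KEY}" >> <keys-file>
#   echo "SENTRY_WRITE_KEY=${RASA_EXCEPTION_WRITE_KEY}"  >> <keys-file>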
+## Key can be found at https://sentry.io/settings/rasahq/projects/rasa-open-source/install/python/ +## - SENTRY_AUTH_TOKEN: authentication used to tell Sentry about any new releases +## created at https://sentry.io/settings/account/api/auth-tokens/ +# +#env: +# # needed to fix issues with boto during testing: +# # https://github.com/travis-ci/travis-ci/issues/7940 +# BOTO_CONFIG: /dev/null +# +# IS_TAG_BUILD: ${{ startsWith(github.event.ref, 'refs/tags') }} +# DOCKERHUB_USERNAME: tmbo +# DEFAULT_PYTHON_VERSION: "3.10" +# +# # for wait_for_xx jobs +# WAIT_TIMEOUT_SECS: 3000 +# WAIT_INTERVAL_SECS: 60 +# +#jobs: +# changes: +# name: Check for file changes +# runs-on: ubuntu-22.04 +# outputs: +# # Both of the outputs below are strings but only one exists at any given time +# backend: ${{ steps.changed-files.outputs.backend || steps.run-all.outputs.backend }} +# docker: ${{ steps.changed-files.outputs.docker || steps.run-all.outputs.docker }} +# docs: ${{ steps.changed-files.outputs.docs || steps.run-all.outputs.docs }} +# is_pre_release_version: ${{ steps.rasa_check_version_type.outputs.is_pre_release_version }} +# steps: +# - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 +# # Run the normal filters if the all-tests-required label is not set +# id: changed-files +# if: contains(github.event.pull_request.labels.*.name, 'status:all-tests-required') == false && github.event_name == 'pull_request' +# with: +# token: ${{ secrets.GITHUB_TOKEN }} +# filters: .github/change_filters.yml +# - name: Set all filters to true if all tests are required +# # Set all filters to true if the all-tests-required label is set or if we are not in a PR +# # Bypasses all the change filters in change_filters.yml and forces all outputs to true +# id: run-all +# if: contains(github.event.pull_request.labels.*.name, 'status:all-tests-required') || github.event_name != 'pull_request' +# run: | +# echo "backend=true" >> $GITHUB_OUTPUT +# echo "docker=true" >> $GITHUB_OUTPUT +# echo "docs=true" >> $GITHUB_OUTPUT +# +# - name: Check if tag version is a pre release version +# id: rasa_check_version_type +# if: env.IS_TAG_BUILD == 'true' +# run: | +# # Get current tagged Rasa version +# CURRENT_TAG=${GITHUB_REF#refs/tags/} +# if [[ "$CURRENT_TAG" =~ ^[0-9.]+$ ]]; then +# echo "is_pre_release_version=false" >> $GITHUB_OUTPUT +# else +# echo "is_pre_release_version=true" >> $GITHUB_OUTPUT # fi - - deploy: - name: Deploy to PyPI - runs-on: ubuntu-22.04 - - # deploy will only be run when there is a tag available - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'RasaHQ/rasa' - needs: [docker] # only run after the docker build stage succeeds - - steps: - - name: Checkout git repository πŸ• - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Set up Python 3.9 🐍 - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b - with: - python-version: 3.9 - - - name: Read Poetry Version πŸ”’ - run: | - echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV - shell: bash - - - name: Install poetry πŸ¦„ - uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Copy Segment write key to the package - env: - RASA_TELEMETRY_WRITE_KEY: ${{ secrets.RASA_OSS_TELEMETRY_WRITE_KEY }} - RASA_EXCEPTION_WRITE_KEY: ${{ secrets.RASA_OSS_EXCEPTION_WRITE_KEY }} - run: | - 
./scripts/write_keys_file.sh - - - name: Build βš’οΈ Distributions - run: poetry build - - - name: Publish to PyPI πŸ“¦ - uses: pypa/gh-action-pypi-publish@c7f29f7adef1a245bd91520e94867e5c6eedddcc - with: - user: __token__ - password: ${{ secrets.PYPI_TOKEN }} - - - name: Notify Sentry about the release - env: - GITHUB_TAG: ${{ github.ref }} - SENTRY_ORG: rasahq - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} - run: | - curl -sL https://sentry.io/get-cli/ | bash - GITHUB_TAG=${GITHUB_TAG/refs\/tags\//} - sentry-cli releases new -p rasa-open-source "rasa-$GITHUB_TAG" - sentry-cli releases set-commits --auto "rasa-$GITHUB_TAG" - sentry-cli releases finalize "rasa-$GITHUB_TAG" - - - name: Notify Slack & Publish Release Notes πŸ—ž - env: - GH_RELEASE_NOTES_TOKEN: ${{ secrets.GH_RELEASE_NOTES_TOKEN }} - SLACK_WEBHOOK_TOKEN: ${{ secrets.SLACK_WEBHOOK_TOKEN }} - GITHUB_TAG: ${{ github.ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - GITHUB_TAG=${GITHUB_TAG/refs\/tags\//} - pip install -U github3.py pep440-version-utils - python3 scripts/publish_gh_release_notes.py - ./scripts/ping_slack_about_package_release.sh - - send_slack_notification_for_release_on_failure: - name: Notify Slack & Publish Release Notes - runs-on: ubuntu-22.04 - # run this job when the workflow is triggered by a tag push - if: always() && github.repository == 'RasaHQ/rasa' && github.ref_type == 'tag' - needs: - - deploy - - steps: - - name: Notify Slack of failure ⛔️ - # send notification if 'deploy' is skipped (previous needed job failed) or failed - if: needs.deploy.result != 'success' - env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_TOKEN }} - uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a - with: - args: "⛔️ *Rasa Open Source* version `${{ github.ref_name }}` could not be released 😱! Please check out GitHub Actions: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" +# +# wait_for_docs_tests: +# # Looks for doc test workflows and waits for it to complete successfully +# # Runs on pushes to main exclusively +# name: Wait for docs tests +# if: github.ref_type != 'tag' +# runs-on: ubuntu-22.04 +# needs: [changes] +# +# steps: +# - name: Wait for doc tests +# uses: fountainhead/action-wait-for-check@297be350cf8393728ea4d4b39435c7d7ae167c93 +# id: wait-for-doc-tests +# with: +# token: ${{ secrets.GITHUB_TOKEN }} +# checkName: Test Documentation +# ref: ${{ github.event.pull_request.head.sha || github.sha }} +# timeoutSeconds: ${{ env.WAIT_TIMEOUT_SECS }} +# intervalSeconds: ${{ env.WAIT_INTERVAL_SECS }} +# +# - name: Fail the step if the doc tests run could not be found +# if: ${{ steps.wait-for-doc-tests.outputs.conclusion == 'timed_out' }} +# run: | +# echo "Could not find the doc tests run." 
+# exit 1 +# +# quality: +# name: Code Quality +# if: github.ref_type != 'tag' +# runs-on: ubuntu-22.04 +# needs: [changes] +# +# steps: +# - name: Checkout git repository πŸ• +# if: needs.changes.outputs.backend == 'true' +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 +# if: needs.changes.outputs.backend == 'true' +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: ${{ env.DEFAULT_PYTHON_VERSION }} +# +# - name: Read Poetry Version πŸ”’ +# if: needs.changes.outputs.backend == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# if: needs.changes.outputs.backend == 'true' +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Inject setuptools into poetry's runtime environment +# if: needs.changes.outputs.backend == 'true' +# run: | +# poetry self add setuptools +# +# - name: Load Poetry Cached Libraries ⬇ +# id: cache-poetry +# if: needs.changes.outputs.backend == 'true' +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# with: +# path: .venv +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }} +# restore-keys: ${{ runner.os }}-poetry-${{ env.DEFAULT_PYTHON_VERSION }} +# +# - name: Clear Poetry cache +# if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') +# run: rm -r .venv +# +# - name: Create virtual environment +# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' +# run: python -m venv create .venv +# +# - name: Set up virtual environment +# if: needs.changes.outputs.backend == 'true' +# run: poetry config virtualenvs.in-project true +# +# - name: Install Dependencies πŸ“¦ +# if: needs.changes.outputs.backend == 'true' +# # Poetry intermittently fails to install dependency if it is not PEP 517 compliant +# # This is a workaround for that issue +# run: | +# sudo apt-get -y install libpq-dev +# make install-full +# +# - name: Checkout target branch to be able to diff +# if: needs.changes.outputs.backend == 'true' && github.event_name == 'pull_request' +# run: | +# git fetch origin ${{ github.base_ref }} +# echo "DOCSTRING_DIFF_BRANCH=origin/${{ github.base_ref }}" >> $GITHUB_ENV +# +# # Fetch entire history for current branch so that `make lint-docstrings` +# # can calculate the proper diff between the branches +# git fetch --unshallow origin "${{ github.ref }}" +# +# - name: Add github workflow problem matchers +# if: needs.changes.outputs.backend == 'true' +# run: | +# echo "::add-matcher::.github/matchers/flake8-error-matcher.json" +# +# - name: Lint Code 🎎 +# if: needs.changes.outputs.backend == 'true' +# run: | +# # If it's not a pull request, $DOCSTRING_DIFF_BRANCH is unset. 
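# NOTE: a sketch of what a branch-scoped docstring lint amounts to; the real
# recipe is `make lint-docstrings` in the repo's Makefile and may differ, and
# the flake8 docstring selector here is an assumption (it requires the
# flake8-docstrings plugin):
#
#   git diff --name-only "$DOCSTRING_DIFF_BRANCH" -- '*.py' \
#     | xargs --no-run-if-empty poetry run flake8 --select=D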
+# # This will result in an empty diff, which effictively means that +# # make lint-docstrings will be skipped for other events than `pull_request` +# make lint BRANCH=$DOCSTRING_DIFF_BRANCH +# +# - name: Check Types πŸ“š +# if: needs.changes.outputs.backend == 'true' +# run: make types +# +# - name: Lint Changelog Filenames πŸ“ +# if: needs.changes.outputs.backend == 'true' && github.event_name == 'pull_request' +# run: make lint-changelog +# +# - name: Test CLI πŸ–₯ +# if: needs.changes.outputs.backend == 'true' +# # makes sure we catch any dependency error early. they will create strange +# # errors during the docs build, so easier to catch them early on by +# # trying to run the `rasa` command once before the docs build. +# run: poetry run rasa --help +# +# changelog: +# name: Check for changelog +# runs-on: ubuntu-22.04 +# +# steps: +# - name: Checkout git repository πŸ• +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Assert release includes all changelog entries +# # check changelog folder only when we create pull request preparing release +# if: github.event_name == 'pull_request' && startsWith(github.head_ref, 'prepare-release') && needs.changes.outputs.is_pre_release_version == 'false' +# working-directory: changelog +# run: | +# # List all unexpected files in changelog/ +# UNEXPECTED_FILES=$(ls -A --ignore={"README.md",".gitignore","_template.md.jinja2"}) +# +# # Exit with error if found any unexpected files +# [[ "$UNEXPECTED_FILES" ]] && \ +# echo "Found the following unexpected files in changelogs/" && \ +# echo "$UNEXPECTED_FILES" && \ +# exit 1 || \ +# echo "Release includes all changelog entries." +# +# test: +# name: Run Tests +# if: github.ref_type != 'tag' +# runs-on: ${{ matrix.os }} +# timeout-minutes: 60 +# needs: [changes] +# strategy: +# fail-fast: false +# matrix: +# test: +# - test-cli +# - test-core-featurizers +# - test-policies +# - test-nlu-featurizers +# - test-nlu-predictors +# - test-full-model-training +# - test-other-unit-tests +# - test-performance +# os: [ubuntu-22.04, windows-2019] +# python-version: [3.8, 3.9, "3.10"] +# +# steps: +# - name: Run DataDog Agent +# if: needs.changes.outputs.backend == 'true' && (matrix.os != 'windows-2019' || contains(github.event.pull_request.labels.*.name, 'tools:datadog-windows')) +# run: | +# docker run --name dd_agent -p 8126:8126 -d -e "DD_API_KEY=${{ secrets.DD_API_KEY }}" -e "DD_INSIDE_CI=true" -e "DD_HOSTNAME=none" -e "DD_SITE=datadoghq.eu" -e GITHUB_ACTIONS=true -e CI=true datadog/agent:latest +# docker ps --all --filter name=dd_agent --filter status=running --no-trunc --format "{{.ID}} {{.Status}}" +# docker port dd_agent +# +# - name: Checkout git repository πŸ• +# if: needs.changes.outputs.backend == 'true' +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Set up Python ${{ matrix.python-version }} 🐍 +# if: needs.changes.outputs.backend == 'true' +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Read Poetry Version πŸ”’ +# if: needs.changes.outputs.backend == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# if: needs.changes.outputs.backend == 'true' +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Inject setuptools into poetry's runtime environment +# if: 
needs.changes.outputs.backend == 'true' +# run: | +# poetry self add setuptools +# +# - name: Prevent race condition in poetry build +# # More context about race condition during poetry build can be found here: +# # https://github.com/python-poetry/poetry/issues/7611#issuecomment-1747836233 +# if: needs.changes.outputs.backend == 'true' +# run: | +# poetry config installer.max-workers 1 +# +# - name: Load Poetry Cached Libraries ⬇ +# id: cache-poetry +# if: needs.changes.outputs.backend == 'true' +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# with: +# path: .venv +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} +# +# - name: Clear Poetry cache +# if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') +# run: rm -r .venv +# +# # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. +# # The virtualenv simply doesn't work on Windows with our setup, +# # that's why we use venv to create virtual environment +# - name: Create virtual environment +# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' +# run: python -m venv create .venv +# +# - name: Set up virtual environment +# if: needs.changes.outputs.backend == 'true' +# # Poetry on Windows cannot pick up the virtual environments directory properly, +# # and it creates a new one every time the pipeline runs. +# # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside +# # the project itself, which also makes it easier for us to determine the correct directory +# # that needs to be cached. +# run: poetry config virtualenvs.in-project true +# +# - name: Install Dependencies (Linux) πŸ“¦ +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' +# # Poetry intermittently fails to install dependency if it is not PEP 517 compliant +# # This is a workaround for that issue +# run: | +# sudo apt-get -y install libpq-dev +# make install-full | tee .output +# if grep 'The lock file is not up to date' .output; then exit 1; fi +# make prepare-tests-ubuntu +# +# - name: Install Dependencies (Windows) πŸ“¦ +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' +# # Restoring cache doesn't work properly on Windows due to symlinks. +# # We create symlinks for spacy models, that's why we need to clean them up +# # before caching the dependencies directory. 
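# NOTE: every component of the cache key above takes part in invalidation: the
# runner OS, Poetry version, matrix Python version, the poetry.lock hash, the
# manually bumpable POETRY_CACHE_VERSION secret, and the interpreter location.
# A resolved key could look like this (all values hypothetical):
#
#   Linux-poetry-1.4.2-3.10-9f86d081884c7d65-venv-3-/opt/hostedtoolcache/Python/3.10.13/x64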
+# # More information: https://github.com/actions/cache/issues/120 +# # Poetry intermittently fails to install dependency if it is not PEP 517 compliant +# # This is a workaround for that issue +# run: | +# $spacy_data_dir = ".venv\lib\site-packages\spacy\data" +# if (Test-Path $spacy_data_dir) { +# Get-ChildItem -Force -ErrorAction Stop $spacy_data_dir | Where-Object { if($_.Attributes -match "ReparsePoint"){$_.Delete()} } +# Remove-Item -Force -Recurse $spacy_data_dir +# New-Item -Path $spacy_data_dir -Type Directory +# } +# make install-full +# make prepare-tests-windows-gha +# +# - name: Add github workflow problem matchers +# if: needs.changes.outputs.backend == 'true' && matrix.python-version == 3.7 && matrix.os == 'ubuntu-22.04' +# # only annotate based on test runs on ubuntu: otherwise +# # all errors will be duplicated for each python / os combination +# # therefore, we only enable for the one where most tests are run +# # (tests will still run in other envs, they will just not create annotations) +# run: pip install pytest-github-actions-annotate-failures +# +# - name: Disable "LongPathsEnabled" option on Windows +# if: matrix.os == 'windows-2019' +# # On Windows laptops, a default preset prevents path names from being longer than +# # 260 characters. Some of our users can't enable this setting due to company policies. +# # We implemented a fix for model storage. The Windows container in GitHub +# # comes with the setting enabled, so we disable it here in order to ensure our tests +# # are running in an environment where long path names are prevented. +# run: | +# (Get-ItemProperty "HKLM:System\CurrentControlSet\Control\FileSystem").LongPathsEnabled +# Set-ItemProperty 'HKLM:\System\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -value 0 +# +# - name: Install ddtrace on Linux +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' +# run: poetry run pip install -U 'ddtrace<2.0.0' +# +# - name: Install ddtrace on Windows +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' +# run: | +# .\.venv\Scripts\activate +# py -m pip install -U 'ddtrace<2.0.0' +# +# - name: Test Code πŸ” (multi-process) +# if: needs.changes.outputs.backend == 'true' +# env: +# JOBS: 2 +# PYTHONIOENCODING: "utf-8" +# DD_ENV: ${{ matrix.test }} +# DD_SERVICE: rasa +# DD_ARGS: --ddtrace --ddtrace-patch-all +# run: | +# make ${{ matrix.test }} +# if [[ "${{ matrix.os }}" != "windows-2019" ]]; then +# mv .coverage ${{ github.workspace }}/${{ matrix.test }}-coverage +# fi +# shell: bash # bash shell is a way to make code run for both Linux and Windows +# +# - name: Store coverage reports +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' +# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce +# with: +# name: ${{ matrix.test }}-coverage +# path: | +# ${{ github.workspace }}/${{ matrix.test }}-coverage +# +# test-flaky: +# name: Run Flaky Tests +# if: github.ref_type != 'tag' +# runs-on: ${{ matrix.os }} +# timeout-minutes: 60 +# needs: [changes] +# strategy: +# fail-fast: false +# matrix: +# os: [ubuntu-22.04, windows-2019] +# python-version: [3.8, 3.9, "3.10"] +# +# steps: +# - name: Run DataDog Agent +# if: needs.changes.outputs.backend == 'true' && (matrix.os != 'windows-2019' || contains(github.event.pull_request.labels.*.name, 'tools:datadog-windows')) +# run: | +# docker run --name dd_agent -p 8126:8126 -d -e "DD_API_KEY=${{ secrets.DD_API_KEY }}" -e "DD_INSIDE_CI=true" -e "DD_HOSTNAME=none" -e 
"DD_SITE=datadoghq.eu" -e GITHUB_ACTIONS=true -e CI=true datadog/agent:latest +# docker ps --all --filter name=dd_agent --filter status=running --no-trunc --format "{{.ID}} {{.Status}}" +# docker port dd_agent +# +# - name: Checkout git repository πŸ• +# if: needs.changes.outputs.backend == 'true' +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Set up Python ${{ matrix.python-version }} 🐍 +# if: needs.changes.outputs.backend == 'true' +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Read Poetry Version πŸ”’ +# if: needs.changes.outputs.backend == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# if: needs.changes.outputs.backend == 'true' +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Inject setuptools into poetry's runtime environment +# if: needs.changes.outputs.backend == 'true' +# run: | +# poetry self add setuptools +# +# - name: Load Poetry Cached Libraries ⬇ +# id: cache-poetry +# if: needs.changes.outputs.backend == 'true' +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# with: +# path: .venv +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} +# +# - name: Clear Poetry cache +# if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') +# run: rm -r .venv +# +# # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. +# # The virtualenv simply doesn't work on Windows with our setup, +# # that's why we use venv to create virtual environment +# - name: Create virtual environment +# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' +# run: python -m venv create .venv +# +# - name: Set up virtual environment +# if: needs.changes.outputs.backend == 'true' +# # Poetry on Windows cannot pick up the virtual environments directory properly, +# # and it creates a new one every time the pipeline runs. +# # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside +# # the project itself, which also makes it easier for us to determine the correct directory +# # that needs to be cached. +# run: poetry config virtualenvs.in-project true +# +# - name: Install Dependencies (Linux) πŸ“¦ +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' +# run: | +# sudo apt-get -y install libpq-dev +# make install-full | tee .output +# if grep 'The lock file is not up to date' .output; then exit 1; fi +# make prepare-tests-ubuntu +# +# - name: Install Dependencies (Windows) πŸ“¦ +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' +# # Restoring cache doesn't work properly on Windows due to symlinks. +# # We create symlinks for spacy models, that's why we need to clean them up +# # before caching the dependencies' directory. 
+# # More information: https://github.com/actions/cache/issues/120 +# run: | +# $spacy_data_dir = ".venv\lib\site-packages\spacy\data" +# if (Test-Path $spacy_data_dir) { +# Get-ChildItem -Force -ErrorAction Stop $spacy_data_dir | Where-Object { if($_.Attributes -match "ReparsePoint"){$_.Delete()} } +# Remove-Item -Force -Recurse $spacy_data_dir +# New-Item -Path $spacy_data_dir -Type Directory +# } +# make install-full +# make prepare-tests-windows-gha +# +# - name: Add github workflow problem matchers +# if: needs.changes.outputs.backend == 'true' && matrix.python-version == 3.7 && matrix.os == 'ubuntu-22.04' +# # only annotate based on test runs on ubuntu: otherwise +# # all errors will be duplicated for each python / os combination +# # therefore, we only enable for the one where most tests are run +# # (tests will still run in other envs, they will just not create annotations) +# run: pip install pytest-github-actions-annotate-failures +# +# - name: Disable "LongPathsEnabled" option on Windows +# if: matrix.os == 'windows-2019' +# # On Windows laptops, a default preset prevents path names from being longer than +# # 260 characters. Some of our users can't enable this setting due to company policies. +# # We implemented a fix for model storage. The Windows container in GitHub +# # comes with the setting enabled, so we disable it here in order to ensure our tests +# # are running in an environment where long path names are prevented. +# run: | +# (Get-ItemProperty "HKLM:System\CurrentControlSet\Control\FileSystem").LongPathsEnabled +# Set-ItemProperty 'HKLM:\System\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -value 0 +# +# - name: Install ddtrace on Linux +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' +# run: poetry run pip install -U 'ddtrace<2.0.0' +# +# - name: Install ddtrace on Windows +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'windows-2019' +# run: | +# .\.venv\Scripts\activate +# py -m pip install -U 'ddtrace<2.0.0' +# +# - name: Test Code πŸ” (multi-process) +# if: needs.changes.outputs.backend == 'true' +# env: +# JOBS: 2 +# PYTHONIOENCODING: "utf-8" +# DD_ENV: test-flaky +# DD_SERVICE: rasa +# DD_ARGS: --ddtrace --ddtrace-patch-all +# run: | +# make test-flaky +# if [[ "${{ matrix.os }}" != "windows-2019" ]]; then +# mv .coverage ${{ github.workspace }}/test-flaky-coverage +# fi +# shell: bash # bash shell is a way to make code run for both Linux and Windows +# +# - name: Store coverage reports +# if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' +# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce +# with: +# name: ${{ matrix.test }}-coverage +# path: | +# ${{ github.workspace }}/${{ matrix.test }}-coverage +# +# upload_coverage_reports: +# name: Upload coverage reports to codeclimate +# if: github.ref_type != 'tag' +# runs-on: ubuntu-22.04 +# # Always upload results even if tests failed +# needs: +# - test +# - changes +# +# steps: +# - name: Checkout git repository πŸ• +# if: needs.changes.outputs.backend == 'true' +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Set up Python 3.10 🐍 +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: "3.10" +# +# - name: Get backend coverage reports +# if: needs.changes.outputs.backend == 'true' +# uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a +# with: +# path: ${{ github.workspace }}/tests_coverage +# +# - name: Merge 
all reports +# if: needs.changes.outputs.backend == 'true' +# run: | +# subs=`ls ${{ github.workspace }}/tests_coverage` +# download_dir="${{ github.workspace }}/tests_coverage" +# final_dir="${{ github.workspace }}/tests_coverage/final" +# +# # Downloaded artifacts go into folders, gotta extract them all into one folder for upload +# mkdir "${final_dir}/" +# for i in $subs; do +# mv "${download_dir}/$i"/* "${final_dir}/" +# done +# +# pip install coverage +# coverage combine "${final_dir}/"* +# coverage xml +# +# - name: Upload reports to codeclimate +# if: needs.changes.outputs.backend == 'true' +# uses: paambaati/codeclimate-action@b649ad206d2e83dafb9ed130deba698aa1b41d78 +# env: +# CC_TEST_REPORTER_ID: ${{ secrets.CODECLIMATE_REPORTER_ID }} +# with: +# coverageLocations: | +# ${{ github.workspace }}/coverage.xml:coverage.py +# debug: true +# +# integration_test: +# name: Run Non-Sequential Integration Tests +# if: github.ref_type != 'tag' +# runs-on: ubuntu-22.04 +# timeout-minutes: 60 +# needs: [changes] +# env: +# REDIS_HOST: localhost +# REDIS_PORT: 6379 +# POSTGRES_HOST: localhost +# POSTGRES_PORT: 5432 +# POSTGRES_USER: postgres +# POSTGRES_PASSWORD: postgres +# RABBITMQ_HOST: localhost +# RABBITMQ_PORT: 5672 +# RABBITMQ_USER: guest +# RABBITMQ_PASSWORD: guest +# +# services: +# redis: +# image: redis:6 +# # Set health checks to wait until redis has started +# options: >- +# --health-cmd "redis-cli ping" +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# ports: +# # FIXME: cannot use ${{ env.REDIS_PORT }} here +# # mapping container ports to the host +# - 6379:6379 +# +# postgres: +# image: postgres:13 +# # Set health checks to wait until postgres has started +# options: >- +# --health-cmd pg_isready +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# env: +# # postgres image requires password to be set +# POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} +# ports: +# # FIXME: cannot use ${{ env.POSTGRES_PORT }} here +# # mapping container ports to the host +# - 5432:5432 +# +# rabbitmq: +# # see https://github.com/docker-library/healthcheck/blob/master/rabbitmq/docker-healthcheck +# image: healthcheck/rabbitmq +# ports: +# - 5672:5672 +# +# mongodb: +# image: mongodb/mongodb-community-server:6.0.4-ubuntu2204 +# options: >- +# --health-cmd "echo 'db.runCommand("ping").ok' | mongosh --quiet" +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# ports: +# - 27017:27017 +# +# steps: +# - name: Checkout git repository πŸ• +# if: needs.changes.outputs.backend == 'true' +# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c +# +# - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 +# if: needs.changes.outputs.backend == 'true' +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: ${{ env.DEFAULT_PYTHON_VERSION }} +# +# - name: Read Poetry Version πŸ”’ +# if: needs.changes.outputs.backend == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# if: needs.changes.outputs.backend == 'true' +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: Load Poetry Cached Libraries ⬇ +# id: cache-poetry +# if: needs.changes.outputs.backend == 'true' +# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 +# with: +# path: .venv +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ 
env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} +# +# - name: Clear Poetry cache +# if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') +# run: rm -r .venv +# +# # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. +# # The virtualenv simply doesn't work on Windows with our setup, +# # that's why we use venv to create virtual environment +# - name: Create virtual environment +# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' +# run: python -m venv create .venv +# +# - name: Set up virtual environment +# if: needs.changes.outputs.backend == 'true' +# # Poetry on Windows cannot pick up the virtual environments directory properly, +# # and it creates a new one every time the pipeline runs. +# # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside +# # the project itself, which also makes it easier for us to determine the correct directory +# # that needs to be cached. +# run: poetry config virtualenvs.in-project true +# +# - name: Install Dependencies (Linux) πŸ“¦ +# if: needs.changes.outputs.backend == 'true' +# run: | +# sudo apt-get -y install libpq-dev +# make install-full | tee .output +# if grep 'The lock file is not up to date' .output; then exit 1; fi +# make prepare-tests-ubuntu +# +# - name: Run kafka and zookeeper containers for integration testing +# if: needs.changes.outputs.backend == 'true' +# run: | +# docker-compose -f tests_deployment/docker-compose.kafka.yml up -d +# +# - name: Test Code with Services 🩺 +# if: needs.changes.outputs.backend == 'true' +# env: +# JOBS: 2 +# INTEGRATION_TEST_PYTEST_MARKERS: '"not sequential"' +# PYTHONIOENCODING: "utf-8" +# run: | +# make test-integration +# +# sequential_integration_test: +# name: Run Sequential Integration Tests +# if: github.ref_type != 'tag' +# runs-on: ubuntu-20.04 +# timeout-minutes: 60 +# needs: [changes] +# env: +# POSTGRES_HOST: localhost +# POSTGRES_PORT: 5432 +# POSTGRES_USER: postgres +# POSTGRES_PASSWORD: postgres +# +# services: +# postgres: +# image: postgres:13 +# # Set health checks to wait until postgres has started +# options: >- +# --health-cmd pg_isready +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# env: +# # postgres image requires password to be set +# POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} +# ports: +# # FIXME: cannot use ${{ env.POSTGRES_PORT }} here +# # mapping container ports to the host +# - 5432:5432 +# +# steps: +# - name: Checkout git repository πŸ• +# if: needs.changes.outputs.backend == 'true' +# uses: actions/checkout@v3 +# +# - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 +# if: needs.changes.outputs.backend == 'true' +# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b +# with: +# python-version: ${{ env.DEFAULT_PYTHON_VERSION }} +# +# - name: Read Poetry Version πŸ”’ +# if: needs.changes.outputs.backend == 'true' +# run: | +# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV +# shell: bash +# +# - name: Install poetry πŸ¦„ +# if: needs.changes.outputs.backend == 'true' +# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 +# with: +# poetry-version: ${{ env.POETRY_VERSION }} +# +# - name: 
Load Poetry Cached Libraries ⬇ +# id: cache-poetry +# if: needs.changes.outputs.backend == 'true' +# uses: actions/cache@v3 +# with: +# path: .venv +# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} +# +# - name: Clear Poetry cache +# if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') +# run: rm -r .venv +# +# # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. +# # The virtualenv simply doesn't work on Windows with our setup, +# # that's why we use venv to create virtual environment +# - name: Create virtual environment +# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' +# run: python -m venv create .venv +# +# - name: Set up virtual environment +# if: needs.changes.outputs.backend == 'true' +# # Poetry on Windows cannot pick up the virtual environments directory properly, +# # and it creates a new one every time the pipeline runs. +# # This step solves this problem β€”Β it tells poetry to always use `.venv` directory inside +# # the project itself, which also makes it easier for us to determine the correct directory +# # that needs to be cached. +# run: poetry config virtualenvs.in-project true +# +# - name: Install Dependencies (Linux) πŸ“¦ +# if: needs.changes.outputs.backend == 'true' +# run: | +# sudo apt-get -y install libpq-dev +# make install-full | tee .output +# if grep 'The lock file is not up to date' .output; then exit 1; fi +# make prepare-tests-ubuntu +# +# # these integration tests need to be ran in a sequential fashion, +# # due to environment constraints, so we're running them in a single process. +# - name: Test Code with Services 🩺 (sequential) +# if: needs.changes.outputs.backend == 'true' +# env: +# JOBS: 1 +# INTEGRATION_TEST_PYTEST_MARKERS: "sequential" +# PYTHONIOENCODING: "utf-8" +# run: | +# make test-integration +# +# - name: Stop kafka and zookeeper containers for integration testing +# if: needs.changes.outputs.backend == 'true' +# run: | +# docker-compose -f tests_deployment/docker-compose.kafka.yml down +# +# build_docker_base_images_and_set_env: +# name: Build Docker base images and setup environment +# runs-on: ubuntu-22.04 +# outputs: +# base_image_hash: ${{ steps.check_image.outputs.base_image_hash }} +# base_mitie_image_hash: ${{ steps.check_image.outputs.base_mitie_image_hash }} +# base_builder_image_hash: ${{ steps.check_image.outputs.base_builder_image_hash }} +# # Tag name used for images created during Docker image builds, e.g. 3886 - a PR number +# image_tag: ${{ steps.set_output.outputs.image_tag }} +# # Return 'true' if tag version is equal or higher than the latest tagged Rasa version +# is_newest_version: ${{ steps.rasa_get_version.outputs.is_newest_version }} +# +# steps: +# # Due to an issue with checking out a wrong commit, we make sure +# # to checkout HEAD commit for a pull request. 
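# NOTE: the `rasa_get_version` step a few steps below relies on the `sort -V -C`
# idiom: it exits 0 only when its input is already in version order, so feeding
# it LATEST followed by CURRENT answers "is the current tag >= the latest
# release?". A worked example with hypothetical versions, where plain lexical
# sorting would misorder the two-digit patch component:
#
#   printf '%s\n%s\n' 3.6.4 3.6.20 | sort -V -C && echo newest || echo older   # newest
#   printf '%s\n%s\n' 3.6.20 3.6.4 | sort -V -C && echo newest || echo older   # older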
+# build_docker_base_images_and_set_env:
+# name: Build Docker base images and set up environment
+# runs-on: ubuntu-22.04
+# outputs:
+# base_image_hash: ${{ steps.check_image.outputs.base_image_hash }}
+# base_mitie_image_hash: ${{ steps.check_image.outputs.base_mitie_image_hash }}
+# base_builder_image_hash: ${{ steps.check_image.outputs.base_builder_image_hash }}
+# # Tag name used for images created during Docker image builds, e.g. 3886 - a PR number
+# image_tag: ${{ steps.set_output.outputs.image_tag }}
+# # Return 'true' if tag version is equal to or higher than the latest tagged Rasa version
+# is_newest_version: ${{ steps.rasa_get_version.outputs.is_newest_version }}
+#
+# steps:
+# # Due to an issue with checking out a wrong commit, we make sure
+# # to checkout HEAD commit for a pull request.
+# # More details: https://github.com/actions/checkout/issues/299
+# - name: Checkout pull request HEAD commit instead of merge commit 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+# if: github.event_name == 'pull_request'
+# with:
+# ref: ${{ github.event.pull_request.head.sha }}
+#
+# - name: Checkout git repository 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+# if: github.event_name != 'pull_request'
+#
+# - name: Set up QEMU
+# uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0
+#
+# - name: Set up Docker Buildx
+# uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # v2.7.0
+#
+# - name: Read Poetry Version 🔒
+# run: |
+# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
+# shell: bash
+#
+# - name: Login to DockerHub Registry 🔒
+# run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ env.DOCKERHUB_USERNAME }} --password-stdin || true
+#
+# - name: Check if tag version is equal to or higher than the latest tagged Rasa version
+# id: rasa_get_version
+# if: env.IS_TAG_BUILD == 'true'
+# run: |
+# # Get latest tagged Rasa version
+# git fetch --depth=1 origin "+refs/tags/*:refs/tags/*"
+# # Fetch branch history
+# git fetch --prune --unshallow
+# LATEST_TAGGED_NON_ALPHA_RASA_VERSION=$(git tag | sort -r -V | grep -E "^[0-9.]+$" | head -n1)
+# CURRENT_TAG=${GITHUB_REF#refs/tags/}
+# # Return 'true' if tag version is equal to or higher than the latest tagged Rasa version
+# IS_NEWEST_VERSION=$((printf '%s\n%s\n' "${LATEST_TAGGED_NON_ALPHA_RASA_VERSION}" "$CURRENT_TAG" \
+# | sort -V -C && echo true || echo false) || true)
+# # Don't mark alphas or release candidates as the newest version
+# if [[ "${IS_NEWEST_VERSION}" == "true" && "$CURRENT_TAG" =~ ^[0-9.]+$ ]]; then
+# echo "is_newest_version=true" >> $GITHUB_OUTPUT
+# else
+# echo "is_newest_version=false" >> $GITHUB_OUTPUT
+# fi
+#
+# - name: Check if a base image exists
+# id: check_image
+# env:
+# DOCKER_CLI_EXPERIMENTAL: enabled
+# run: |
+# # Base image
+# BASE_IMAGE_HASH=${{ hashFiles('docker/Dockerfile.base') }}
+# echo "base_image_hash=${BASE_IMAGE_HASH}" >> $GITHUB_OUTPUT
+#
+# BASE_IMAGE_EXISTS=$((docker manifest inspect rasa/rasa:base-${BASE_IMAGE_HASH} &> /dev/null && echo true || echo false) || true)
+# echo "base_exists=${BASE_IMAGE_EXISTS}" >> $GITHUB_OUTPUT
+#
+# # Base MITIE image
+# BASE_MITIE_IMAGE_HASH=${{ hashFiles('docker/Dockerfile.base-mitie') }}
+# MAKEFILE_MITIE_HASH=${{ hashFiles('Makefile') }}
+# echo "base_mitie_image_hash=${BASE_MITIE_IMAGE_HASH:0:50}-${MAKEFILE_MITIE_HASH:0:50}" >> $GITHUB_OUTPUT
+#
+# BASE_IMAGE_MITIE_EXISTS=$((docker manifest inspect rasa/rasa:base-mitie-${BASE_MITIE_IMAGE_HASH:0:50}-${MAKEFILE_MITIE_HASH:0:50} &> /dev/null && echo true || echo false) || true)
+# echo "base_mitie_exists=${BASE_IMAGE_MITIE_EXISTS}" >> $GITHUB_OUTPUT
+#
+# # Base poetry image
+# BASE_IMAGE_POETRY_EXISTS=$((docker manifest inspect rasa/rasa:base-poetry-${{ env.POETRY_VERSION }} &> /dev/null && echo true || echo false) || true)
+# echo "base_poetry_exists=${BASE_IMAGE_POETRY_EXISTS}" >> $GITHUB_OUTPUT
+#
+# # Base builder image
+# BASE_IMAGE_BUILDER_HASH=${{ hashFiles('docker/Dockerfile.base-builder') }}-poetry-${{ env.POETRY_VERSION }}
+# echo "base_builder_image_hash=${BASE_IMAGE_BUILDER_HASH}" >> $GITHUB_OUTPUT
+#
+# BASE_IMAGE_BUILDER_EXISTS=$((docker manifest inspect rasa/rasa:base-builder-${BASE_IMAGE_BUILDER_HASH} &> /dev/null && echo true || echo false) || true)
+# echo "base_builder_exists=${BASE_IMAGE_BUILDER_EXISTS}" >> $GITHUB_OUTPUT
+#
+# - name: Build Docker base image 🛠
+# if: steps.check_image.outputs.base_exists == 'false' || env.IS_TAG_BUILD == 'true'
+# run: |
+# export IMAGE_TAG=${{ steps.check_image.outputs.base_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base
+#
+# - name: Push Docker base image if it's not building from a fork ⬆
+# if: (steps.check_image.outputs.base_exists == 'false' || env.IS_TAG_BUILD == 'true') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
+# run: |
+# export IMAGE_TAG=${{ steps.check_image.outputs.base_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base --push
+#
+# - name: Build Docker mitie base image 🛠
+# if: steps.check_image.outputs.base_mitie_exists == 'false' || steps.check_image.outputs.base_exists == 'false'
+# run: |
+# export IMAGE_TAG=${{ steps.check_image.outputs.base_mitie_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-mitie
+#
+# - name: Push Docker mitie base image if it's not building from a fork ⬆
+# if: (steps.check_image.outputs.base_mitie_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
+# run: |
+# export IMAGE_TAG=${{ steps.check_image.outputs.base_mitie_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-mitie --push
+#
+# - name: Build Docker poetry base image 🛠
+# if: steps.check_image.outputs.base_poetry_exists == 'false' || steps.check_image.outputs.base_exists == 'false'
+# run: |
+# export IMAGE_TAG=${{ env.POETRY_VERSION }}
+# export BASE_IMAGE_HASH=${{ steps.check_image.outputs.base_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-poetry
+#
+# - name: Push Docker poetry base image if it's not building from a fork ⬆
+# if: (steps.check_image.outputs.base_poetry_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
+# run: |
+# export IMAGE_TAG=${{ env.POETRY_VERSION }}
+# export BASE_IMAGE_HASH=${{ steps.check_image.outputs.base_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-poetry --push
+#
+# - name: Build Docker builder base image 🛠
+# if: steps.check_image.outputs.base_builder_exists == 'false' || steps.check_image.outputs.base_exists == 'false'
+# run: |
+# export IMAGE_TAG=${{ steps.check_image.outputs.base_builder_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-builder
+#
+# - name: Push Docker builder base image if it's not building from a fork ⬆
+# if: (steps.check_image.outputs.base_builder_exists == 'false' || steps.check_image.outputs.base_exists == 'false') && github.event.pull_request.head.repo.owner.login == 'RasaHQ'
+# run: |
+# export IMAGE_TAG=${{ steps.check_image.outputs.base_builder_image_hash }}
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl base-builder --push
+#
+# # Set environment variables for a pull request
+# #
+# # In this scenario, we've created a PR #1234
+# #
+# # Example output:
+# # IMAGE_TAG=1234
+# - name: Set environment variables - pull_request
+# if: github.event_name == 'pull_request' && env.IS_TAG_BUILD == 'false'
+# run: |
+# echo "IMAGE_TAG=${{ github.event.number }}" >> $GITHUB_ENV
+#
+# # Set environment variables for a tag
+# #
+# # In this scenario, we've pushed the '2.0.6' tag
+# #
+# # Example output:
+# # TAG_NAME=2.0.6
+# # IMAGE_TAG=2.0.6
+# - name: Set environment variables - push - tag
+# if: github.event_name == 'push' && env.IS_TAG_BUILD == 'true'
+# run: |
+# TAG_NAME=${GITHUB_REF#refs/tags/}
+# echo "IMAGE_TAG=${TAG_NAME}" >> $GITHUB_ENV
+#
+# # Set environment variables for a branch
+# #
+# # In this scenario, we've pushed changes into the main branch
+# #
+# # Example output:
+# # IMAGE_TAG=main
+# - name: Set environment variables - push - branch
+# if: github.event_name == 'push' && env.IS_TAG_BUILD == 'false'
+# run: |
+# BRANCH_NAME=${GITHUB_REF#refs/heads/}
+# SAFE_BRANCH_NAME="$(echo ${GITHUB_REF#refs/heads/} | sed 's/[\\*+.$\#\-\/]/-/g')"
+# echo "IMAGE_TAG=${SAFE_BRANCH_NAME}" >> $GITHUB_ENV
+#
+# - name: Set output
+# id: set_output
+# run: |
+# echo "image_tag=${{ env.IMAGE_TAG }}" >> $GITHUB_OUTPUT
+#
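The is_newest_version check above leans on `sort -V -C`, which exits 0 only when its input is already in version order. A standalone sketch with made-up tag values:

    #!/usr/bin/env bash
    # The exit status of `sort -V -C` tells us whether LATEST <= CURRENT,
    # i.e. whether the tag being built is the newest release so far.
    LATEST_TAGGED_NON_ALPHA_RASA_VERSION="3.6.20"  # example value
    CURRENT_TAG="3.6.21"                           # example value
    if printf '%s\n%s\n' "$LATEST_TAGGED_NON_ALPHA_RASA_VERSION" "$CURRENT_TAG" | sort -V -C; then
      IS_NEWEST_VERSION=true
    else
      IS_NEWEST_VERSION=false
    fi
    # Only final releases qualify; alphas/rcs such as 3.6.21a1 fail this regex.
    [[ "$CURRENT_TAG" =~ ^[0-9.]+$ ]] || IS_NEWEST_VERSION=false
    echo "is_newest_version=$IS_NEWEST_VERSION"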
+# docker:
+# name: Build Docker
+# runs-on: ubuntu-22.04
+# needs: [changes, build_docker_base_images_and_set_env]
+# env:
+# IMAGE_TAG: ${{ needs.build_docker_base_images_and_set_env.outputs.image_tag }}
+# BASE_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_image_hash }}
+# BASE_MITIE_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_mitie_image_hash }}
+# BASE_BUILDER_IMAGE_HASH: ${{ needs.build_docker_base_images_and_set_env.outputs.base_builder_image_hash }}
+#
+# strategy:
+# matrix:
+# image: [default, full, mitie-en, spacy-de, spacy-it, spacy-en]
+#
+# steps:
+# # Due to an issue with checking out a wrong commit, we make sure
+# # to checkout HEAD commit for a pull request.
+# # More details: https://github.com/actions/checkout/issues/299
+# - name: Checkout pull request HEAD commit instead of merge commit 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+# if: github.event_name == 'pull_request'
+# with:
+# ref: ${{ github.event.pull_request.head.sha }}
+#
+# - name: Checkout git repository 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+# if: github.event_name != 'pull_request'
+#
+# - name: Set up QEMU
+# uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0
+#
+# - name: Set up Docker Buildx
+# uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # v2.7.0
+#
+# - name: Free disk space
+# if: needs.changes.outputs.docker == 'true'
+# # tries to make sure we do not run out of disk space, see
+# # https://github.community/t5/GitHub-Actions/BUG-Strange-quot-No-space-left-on-device-quot-IOExceptions-on/td-p/46101
+# run: |
+# sudo swapoff -a
+# sudo rm -f /swapfile
+# sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+# sudo apt clean
+# docker image prune -a -f
+# docker volume prune -f
+# docker container prune -f
+# df -h
+#
+# - name: Read Poetry Version 🔒
+# run: |
+# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
+# shell: bash
+#
+# - name: Echo Available platforms
+# run: echo ${{ steps.buildx.outputs.platforms }}
+#
+# - name: Login to DockerHub Registry 🔒
+# if: needs.changes.outputs.docker == 'true'
+# run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ env.DOCKERHUB_USERNAME }} --password-stdin || true
+#
+# - name: Copy Segment write key to the package
+# if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'RasaHQ/rasa'
+# env:
+# RASA_TELEMETRY_WRITE_KEY: ${{ secrets.RASA_OSS_TELEMETRY_WRITE_KEY }}
+# RASA_EXCEPTION_WRITE_KEY: ${{ secrets.RASA_OSS_EXCEPTION_WRITE_KEY }}
+# run: |
+# ./scripts/write_keys_file.sh
+#
+# - name: Build Docker image
+# if: needs.changes.outputs.docker == 'true'
+# run: |
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }}
+#
+# - name: Check how much space is left after Docker build
+# run: df -h
+#
+# - name: Push image with main tag 📦
+# if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository == 'RasaHQ/rasa'
+# run: |
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} --push
+#
+# - name: Push image with ${{github.ref}} tag 📦
+# if: needs.changes.outputs.docker == 'true' && github.event_name == 'push' && env.IS_TAG_BUILD == 'true' && github.repository == 'RasaHQ/rasa'
+# run: |
+# IS_NEWEST_VERSION=${{ needs.build_docker_base_images_and_set_env.outputs.is_newest_version }}
+#
+# docker buildx bake --set *.platform=linux/amd64,linux/arm64 -f docker/docker-bake.hcl ${{ matrix.image }} --push
+#
+## # Tag the image as latest
+## if [[ "${IS_NEWEST_VERSION}" == "true" ]]; then
+## if [[ "${{ matrix.image }}" == "default" ]]; then
+## RELEASE_TAG="${IMAGE_TAG}"
+## else
+## RELEASE_TAG="${IMAGE_TAG}-${{ matrix.image }}"
+## fi
+##
+## LATEST_TAG=$(echo $RELEASE_TAG | sed 's/'$IMAGE_TAG'/latest/g')
+##
+## docker tag rasa/rasa:${RELEASE_TAG} rasa/rasa:${LATEST_TAG}
+## docker push rasa/rasa:${LATEST_TAG}
+## fi
+#
+# deploy:
+# name: Deploy to PyPI
+# runs-on: ubuntu-22.04
+#
+# # deploy will only be run when there is a tag available
+# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'RasaHQ/rasa'
+# needs: [docker] # only run after the docker build stage succeeds
+#
+# steps:
+# - name: Checkout git repository 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+#
+# - name: Set up Python 3.9 🐍
+# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+# with:
+# python-version: 3.9
+#
+# - name: Read Poetry Version 🔒
+# run: |
+# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
+# shell: bash
+#
+# - name: Install poetry 🦄
+# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
+# with:
+# poetry-version: ${{ env.POETRY_VERSION }}
+#
+# - name: Copy Segment write key to the package
+# env:
+# RASA_TELEMETRY_WRITE_KEY: ${{ secrets.RASA_OSS_TELEMETRY_WRITE_KEY }}
+# RASA_EXCEPTION_WRITE_KEY: ${{ secrets.RASA_OSS_EXCEPTION_WRITE_KEY }}
+# run: |
+# ./scripts/write_keys_file.sh
+#
+# - name: Build ⚒️ Distributions
+# run: poetry build
+#
+# - name: Publish to PyPI 📦
+# uses: pypa/gh-action-pypi-publish@c7f29f7adef1a245bd91520e94867e5c6eedddcc
+# with:
+# user: __token__
+# password: ${{ secrets.PYPI_TOKEN }}
+#
+# - name: Notify Sentry about the release
+# env:
+# GITHUB_TAG: ${{ github.ref }}
+# SENTRY_ORG: rasahq
+# SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+# run: |
+# curl -sL https://sentry.io/get-cli/ | bash
+# GITHUB_TAG=${GITHUB_TAG/refs\/tags\//}
+# sentry-cli releases new -p rasa-open-source "rasa-$GITHUB_TAG"
+# sentry-cli releases set-commits --auto "rasa-$GITHUB_TAG"
+# sentry-cli releases finalize "rasa-$GITHUB_TAG"
+#
+# - name: Notify Slack & Publish Release Notes 🗞
+# env:
+# GH_RELEASE_NOTES_TOKEN: ${{ secrets.GH_RELEASE_NOTES_TOKEN }}
+# SLACK_WEBHOOK_TOKEN: ${{ secrets.SLACK_WEBHOOK_TOKEN }}
+# GITHUB_TAG: ${{ github.ref }}
+# GITHUB_REPO_SLUG: ${{ github.repository }}
+# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+# run: |
+# GITHUB_TAG=${GITHUB_TAG/refs\/tags\//}
+# pip install -U github3.py pep440-version-utils
+# python3 scripts/publish_gh_release_notes.py
+# ./scripts/ping_slack_about_package_release.sh
+#
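The base-image steps earlier in this patch all rely on the same existence probe: `docker manifest inspect` fails for a missing tag without pulling anything. A minimal sketch of that pattern (the image tag is a placeholder, not a real one):

    #!/usr/bin/env bash
    # Probe a remote registry for a tag; the exit status signals existence.
    IMAGE="rasa/rasa:base-0123abc"  # placeholder tag
    if DOCKER_CLI_EXPERIMENTAL=enabled docker manifest inspect "$IMAGE" &> /dev/null; then
      echo "exists=true"
    else
      echo "exists=false"
    fi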
+# send_slack_notification_for_release_on_failure:
+# name: Notify Slack of Release Failure
+# runs-on: ubuntu-22.04
+# # run this job when the workflow is triggered by a tag push
+# if: always() && github.repository == 'RasaHQ/rasa' && github.ref_type == 'tag'
+# needs:
+# - deploy
+#
+# steps:
+# - name: Notify Slack of failure ⛔️
+# # send notification if 'deploy' is skipped (previous needed job failed) or failed
+# if: needs.deploy.result != 'success'
+# env:
+# SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_TOKEN }}
+# uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a
+# with:
+# args: "⛔️ *Rasa Open Source* version `${{ github.ref_name }}` could not be released 😱! Please check out GitHub Actions: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 076f44d0c612..7b24e2657c28 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -98,228 +98,228 @@ jobs:
 fi
 fi
- prebuild_docs:
- name: Prebuild Docs
- runs-on: ubuntu-22.04
- needs: [evaluate_release_tag]
- # don't run this for main branches of forks, would fail anyway
- if: github.repository == 'RasaHQ/rasa' && needs.evaluate_release_tag.outputs.build_docs == 'true' && github.ref != 'refs/heads/documentation' && github.event_name != 'pull_request'
-
- steps:
- - name: Checkout git repository 🕝
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-
- - name: Set up Python 3.10 🐍
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
- with:
- python-version: '3.10'
-
- - name: Set up Node 12.x 🦙
- uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
- with:
- node-version: "12.x"
-
- - name: Read Poetry Version 🔒
- run: |
- echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
- shell: bash
-
- - name: Install poetry 🦄
- uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
- with:
- poetry-version: ${{ env.POETRY_VERSION }}
-
- - name: Load Poetry Cached Libraries ⬇
- id: cache-poetry
- uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
- with:
- path: .venv
- key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-non-full-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }}
- restore-keys: ${{ runner.os }}-poetry-3.9-non-full
-
- - name: Clear Poetry cache
- if: steps.cache-poetry.outputs.cache-hit == 'true'
- run: rm -r .venv
-
- - name: Create virtual environment
- if: steps.cache-poetry.outputs.cache-hit != 'true'
- run: python -m venv create .venv
-
- - name: Set up virtual environment
- run: poetry config virtualenvs.in-project true
-
- - name: Load Yarn Cached Packages ⬇
- uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
- with:
- path: docs/node_modules
- key: ${{ runner.os }}-yarn-12.x-${{ hashFiles('docs/yarn.lock') }}
- restore-keys: ${{ runner.os }}-yarn-12.x
-
- - name: Install Dependencies 📦
- run: make install install-docs
-
- - name: Pre-build Docs 🧶
- run: make prepare-docs
-
- - name: Push docs to documentation branch 🏃‍♀️
- env:
- GH_DOCS_WRITE_KEY: ${{ secrets.GH_DOCS_WRITE_KEY }}
- TMP_DOCS_FOLDER: /tmp/documentation-${{ github.run_id }}
- TMP_SSH_KEY_PATH: /tmp/docs_key
- run: |
- eval "$(ssh-agent -s)"; touch $TMP_SSH_KEY_PATH; chmod 0600 $TMP_SSH_KEY_PATH
- echo "$GH_DOCS_WRITE_KEY" > $TMP_SSH_KEY_PATH
- ssh-add $TMP_SSH_KEY_PATH
-
- git config --global user.email "builds@github-ci.com"
- git config --global user.name "GitHub CI"
- git remote set-url --push origin "git@github.com:${{github.repository}}"
-
- ./scripts/push_docs_to_branch.sh
-
- - name: Notify Slack on failure
- if: failure()
- env:
- SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
- uses: voxmedia/github-action-slack-notify-build@3665186a8c1a022b28a1dbe0954e73aa9081ea9e # v1.6.0
- with:
- channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }}
- status: FAILED
- color: warning
-
- preview_docs:
- name: Preview Docs
- runs-on: ubuntu-22.04
- needs: [changes]
- # don't run this for pull requests from forks
- if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == 'RasaHQ/rasa'
-
- steps:
- - name: Checkout git repository 🕝
- if: needs.changes.outputs.docs == 'true'
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-
- - name: Set up Python 3.10 🐍
- if: needs.changes.outputs.docs == 'true'
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
- with:
- python-version: '3.10'
-
- - name: Set up Node 12.x 🦙
- if: needs.changes.outputs.docs == 'true'
- uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
- with:
- node-version: "12.x"
-
- - name: Read Poetry Version 🔒
- if: needs.changes.outputs.docs == 'true'
- run: |
- echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
- shell: bash
-
- - name: Install poetry 🦄
- if: needs.changes.outputs.docs == 'true'
- uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
- with:
- poetry-version: ${{ env.POETRY_VERSION }}
-
- - name: Load Poetry Cached Libraries ⬇
- id: cache-poetry
- if: needs.changes.outputs.docs == 'true'
- uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
- with:
- path: .venv
- key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-non-full-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }}
- restore-keys: ${{ runner.os }}-poetry-3.9-non-full
-
- - name: Clear Poetry cache
- if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.docs == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-preview-docs')
- run: rm -r .venv
-
- - name: Create virtual environment
- if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-preview-docs')) && needs.changes.outputs.docs == 'true'
- run: python -m venv create .venv
-
- - name: Set up virtual environment
- if: needs.changes.outputs.docs == 'true'
- run: poetry config virtualenvs.in-project true
-
- - name: Load Yarn Cached Packages ⬇
- if: needs.changes.outputs.docs == 'true'
- uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
- with:
- path: docs/node_modules
- key: ${{ runner.os }}-yarn-12.x-${{ hashFiles('docs/yarn.lock') }}
- restore-keys: ${{ runner.os }}-yarn-12.x
-
- - name: Install Dependencies 📦
- if: needs.changes.outputs.docs == 'true'
- run: make install install-docs
-
- - name: Pre-build Docs 🧶
- if: needs.changes.outputs.docs == 'true'
- run: make prepare-docs
-
- - name: Preview draft build 🔬
- if: needs.changes.outputs.docs == 'true'
- id: preview_draft_build
- env:
- NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
- NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
- DOCS_SITE_BASE_URL: /docs/rasa
- PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
- run: |
- make preview-docs
- DEPLOY_URL="https://$PULL_REQUEST_NUMBER--rasahq-docs-rasa-v2.netlify.app${DOCS_SITE_BASE_URL}"
- echo "preview_url=$DEPLOY_URL" >> $GITHUB_OUTPUT
-
- - name: Create a comment with help description
- if: needs.changes.outputs.docs == 'true'
- uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486
- with:
- mode: "delete-previous"
- id: comment_docs_previews
- github-token: ${{ secrets.GITHUB_TOKEN }}
- body: |
- 🚀 A preview of the docs has been deployed at the following URL: ${{ steps.preview_draft_build.outputs.preview_url }}
-
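The Preview draft build step above derives the Netlify preview URL purely from the PR number and the site slug taken from the workflow; a sketch with an example PR number:

    #!/usr/bin/env bash
    # Rebuild the deploy-preview URL the way the step above does.
    PULL_REQUEST_NUMBER=1234              # example value
    DOCS_SITE_BASE_URL=/docs/rasa
    DEPLOY_URL="https://${PULL_REQUEST_NUMBER}--rasahq-docs-rasa-v2.netlify.app${DOCS_SITE_BASE_URL}"
    echo "preview_url=${DEPLOY_URL}"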
- publish_docs:
- name: Publish Docs
- runs-on: ubuntu-22.04
- # don't run this for main branches of forks; only run on documentation branch
- if: github.repository == 'RasaHQ/rasa' && github.ref == 'refs/heads/documentation'
-
- steps:
- - name: Checkout git repository 🕝
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-
- - name: Set up Node 12.x 🦙
- uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
- with:
- node-version: "12.x"
-
- - name: Load Yarn Cached Packages ⬇
- uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
- with:
- path: docs/node_modules
- key: ${{ runner.os }}-yarn-12.x-${{ hashFiles('docs/yarn.lock') }}
- restore-keys: ${{ runner.os }}-yarn-12.x
-
- - name: Install Dependencies 📦
- run: make install-docs
-
- - name: Publish production build ✅
- env:
- NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
- NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
- run: make publish-docs
-
- - name: Notify Slack on failure
- if: failure()
- env:
- SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
- uses: voxmedia/github-action-slack-notify-build@3665186a8c1a022b28a1dbe0954e73aa9081ea9e # v1.6.0
- with:
- channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }}
- status: FAILED
- color: warning
+# prebuild_docs:
+# name: Prebuild Docs
+# runs-on: ubuntu-22.04
+# needs: [evaluate_release_tag]
+# # don't run this for main branches of forks, would fail anyway
+# if: github.repository == 'RasaHQ/rasa' && needs.evaluate_release_tag.outputs.build_docs == 'true' && github.ref != 'refs/heads/documentation' && github.event_name != 'pull_request'
+#
+# steps:
+# - name: Checkout git repository 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+#
+# - name: Set up Python 3.10 🐍
+# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+# with:
+# python-version: '3.10'
+#
+# - name: Set up Node 12.x 🦙
+# uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
+# with:
+# node-version: "12.x"
+#
+# - name: Read Poetry Version 🔒
+# run: |
+# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
+# shell: bash
+#
+# - name: Install poetry 🦄
+# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
+# with:
+# poetry-version: ${{ env.POETRY_VERSION }}
+#
+# - name: Load Poetry Cached Libraries ⬇
+# id: cache-poetry
+# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
+# with:
+# path: .venv
+# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-non-full-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }}
+# restore-keys: ${{ runner.os }}-poetry-3.9-non-full
+#
+# - name: Clear Poetry cache
+# if: steps.cache-poetry.outputs.cache-hit == 'true'
+# run: rm -r .venv
+#
+# - name: Create virtual environment
+# if: steps.cache-poetry.outputs.cache-hit != 'true'
+# run: python -m venv create .venv
+#
+# - name: Set up virtual environment
+# run: poetry config virtualenvs.in-project true
+#
+# - name: Load Yarn Cached Packages ⬇
+# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
+# with:
+# path: docs/node_modules
+# key: ${{ runner.os }}-yarn-12.x-${{ hashFiles('docs/yarn.lock') }}
+# restore-keys: ${{ runner.os }}-yarn-12.x
+#
+# - name: Install Dependencies 📦
+# run: make install install-docs
+#
+# - name: Pre-build Docs 🧶
+# run: make prepare-docs
+#
+# - name: Push docs to documentation branch 🏃‍♀️
+# env:
+# GH_DOCS_WRITE_KEY: ${{ secrets.GH_DOCS_WRITE_KEY }}
+# TMP_DOCS_FOLDER: /tmp/documentation-${{ github.run_id }}
+# TMP_SSH_KEY_PATH: /tmp/docs_key
+# run: |
+# eval "$(ssh-agent -s)"; touch $TMP_SSH_KEY_PATH; chmod 0600 $TMP_SSH_KEY_PATH
+# echo "$GH_DOCS_WRITE_KEY" > $TMP_SSH_KEY_PATH
+# ssh-add $TMP_SSH_KEY_PATH
+#
+# git config --global user.email "builds@github-ci.com"
+# git config --global user.name "GitHub CI"
+# git remote set-url --push origin "git@github.com:${{github.repository}}"
+#
+# ./scripts/push_docs_to_branch.sh
+#
+# - name: Notify Slack on failure
+# if: failure()
+# env:
+# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
+# uses: voxmedia/github-action-slack-notify-build@3665186a8c1a022b28a1dbe0954e73aa9081ea9e # v1.6.0
+# with:
+# channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }}
+# status: FAILED
+# color: warning
+#
+# preview_docs:
+# name: Preview Docs
+# runs-on: ubuntu-22.04
+# needs: [changes]
+# # don't run this for pull requests from forks
+# if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == 'RasaHQ/rasa'
+#
+# steps:
+# - name: Checkout git repository 🕝
+# if: needs.changes.outputs.docs == 'true'
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+#
+# - name: Set up Python 3.10 🐍
+# if: needs.changes.outputs.docs == 'true'
+# uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+# with:
+# python-version: '3.10'
+#
+# - name: Set up Node 12.x 🦙
+# if: needs.changes.outputs.docs == 'true'
+# uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
+# with:
+# node-version: "12.x"
+#
+# - name: Read Poetry Version 🔒
+# if: needs.changes.outputs.docs == 'true'
+# run: |
+# echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV
+# shell: bash
+#
+# - name: Install poetry 🦄
+# if: needs.changes.outputs.docs == 'true'
+# uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8
+# with:
+# poetry-version: ${{ env.POETRY_VERSION }}
+#
+# - name: Load Poetry Cached Libraries ⬇
+# id: cache-poetry
+# if: needs.changes.outputs.docs == 'true'
+# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
+# with:
+# path: .venv
+# key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-3.9-non-full-${{ hashFiles('**/poetry.lock') }}-${{ secrets.POETRY_CACHE_VERSION }}
+# restore-keys: ${{ runner.os }}-poetry-3.9-non-full
+#
+# - name: Clear Poetry cache
+# if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.docs == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-preview-docs')
+# run: rm -r .venv
+#
+# - name: Create virtual environment
+# if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-preview-docs')) && needs.changes.outputs.docs == 'true'
+# run: python -m venv create .venv
+#
+# - name: Set up virtual environment
+# if: needs.changes.outputs.docs == 'true'
+# run: poetry config virtualenvs.in-project true
+#
+# - name: Load Yarn Cached Packages ⬇
+# if: needs.changes.outputs.docs == 'true'
+# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
+# with:
+# path: docs/node_modules
+# key: ${{ runner.os }}-yarn-12.x-${{ hashFiles('docs/yarn.lock') }}
+# restore-keys: ${{ runner.os }}-yarn-12.x
+#
+# - name: Install Dependencies 📦
+# if: needs.changes.outputs.docs == 'true'
+# run: make install install-docs
+#
+# - name: Pre-build Docs 🧶
+# if: needs.changes.outputs.docs == 'true'
+# run: make prepare-docs
+#
+# - name: Preview draft build 🔬
+# if: needs.changes.outputs.docs == 'true'
+# id: preview_draft_build
+# env:
+# NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
+# NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+# DOCS_SITE_BASE_URL: /docs/rasa
+# PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
+# run: |
+# make preview-docs
+# DEPLOY_URL="https://$PULL_REQUEST_NUMBER--rasahq-docs-rasa-v2.netlify.app${DOCS_SITE_BASE_URL}"
+# echo "preview_url=$DEPLOY_URL" >> $GITHUB_OUTPUT
+#
+# - name: Create a comment with help description
+# if: needs.changes.outputs.docs == 'true'
+# uses: RasaHQ/create-comment@da7b2ec20116674919493bb5894eea70fdaa6486
+# with:
+# mode: "delete-previous"
+# id: comment_docs_previews
+# github-token: ${{ secrets.GITHUB_TOKEN }}
+# body: |
+# 🚀 A preview of the docs has been deployed at the following URL: ${{ steps.preview_draft_build.outputs.preview_url }}
+#
+# publish_docs:
+# name: Publish Docs
+# runs-on: ubuntu-22.04
+# # don't run this for main branches of forks; only run on documentation branch
+# if: github.repository == 'RasaHQ/rasa' && github.ref == 'refs/heads/documentation'
+#
+# steps:
+# - name: Checkout git repository 🕝
+# uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+#
+# - name: Set up Node 12.x 🦙
+# uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
+# with:
+# node-version: "12.x"
+#
+# - name: Load Yarn Cached Packages ⬇
+# uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
+# with:
+# path: docs/node_modules
+# key: ${{ runner.os }}-yarn-12.x-${{ hashFiles('docs/yarn.lock') }}
+# restore-keys: ${{ runner.os }}-yarn-12.x
+#
+# - name: Install Dependencies 📦
+# run: make install-docs
+#
+# - name: Publish production build ✅
+# env:
+# NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
+# NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+# run: make publish-docs
+#
+# - name: Notify Slack on failure
+# if: failure()
+# env:
+# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
+# uses: voxmedia/github-action-slack-notify-build@3665186a8c1a022b28a1dbe0954e73aa9081ea9e # v1.6.0
+# with:
+# channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }}
+# status: FAILED
+# color: warning
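A final note on the "Set environment variables - push - branch" step earlier in this patch: Docker tags cannot contain slashes and several other punctuation characters, so the step funnels them all into dashes. A sketch with an example ref:

    #!/usr/bin/env bash
    # Turn a branch ref into a Docker-safe image tag.
    GITHUB_REF="refs/heads/feature/foo.bar"  # example ref
    SAFE_BRANCH_NAME="$(echo "${GITHUB_REF#refs/heads/}" | sed 's/[\\*+.$\#\-\/]/-/g')"
    echo "IMAGE_TAG=${SAFE_BRANCH_NAME}"     # prints IMAGE_TAG=feature-foo-bar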