# What?
#
# Tag and release an arbitrary ref. Uploads to an internal archive for further processing.
#
# How?
#
# After checking out and testing the provided ref, the image is built and uploaded.
#
# When?
#
# Manual trigger.

name: "Release to Cloud"
run-name: "Release to Cloud off of ${{ inputs.ref }}"

on:
  workflow_dispatch:
    inputs:
      ref:
        description: "The ref (sha or branch name) to use"
        type: string
        default: "main"
        required: true
      package_test_command:
        description: "Package test command"
        type: string
        default: "python -c \"import dbt.adapters.spark\""
        required: true
      skip_tests:
        description: "Should the tests be skipped? (defaults to false)"
        type: boolean
        required: true
        default: false

defaults:
  run:
    shell: "bash"

env:
  PYTHON_TARGET_VERSION: "3.8"

jobs:
  run-unit-tests:
    name: "Unit tests"
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: "Check out the repository"
        uses: actions/checkout@v4

      - name: "Set up Python ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: "${{ env.PYTHON_TARGET_VERSION }}"

      - name: Install python dependencies
        run: |
          sudo apt-get update
          # -y keeps apt-get non-interactive on the CI runner
          sudo apt-get install -y libsasl2-dev
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install -r requirements.txt
          python -m pip install -r dev-requirements.txt
          python -m pip install -e .

      - name: Run unit tests
        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
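
  # Functional tests: run the test suite once per Spark connection profile in the matrix
  # below, using the Dagger-based runner (dagger/run_dbt_spark_tests.py).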
  run-integration-tests:
    name: "${{ matrix.test }}"
    needs: [run-unit-tests]
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        test:
          - "apache_spark"
          - "spark_session"
          - "databricks_sql_endpoint"
          - "databricks_cluster"
          - "databricks_http_cluster"
    env:
      DBT_INVOCATION_ENV: github-actions
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}
      DBT_DATABRICKS_CLUSTER_NAME: ${{ secrets.DBT_DATABRICKS_CLUSTER_NAME }}
      DBT_DATABRICKS_HOST_NAME: ${{ secrets.DBT_DATABRICKS_HOST_NAME }}
      DBT_DATABRICKS_ENDPOINT: ${{ secrets.DBT_DATABRICKS_ENDPOINT }}
      DBT_DATABRICKS_TOKEN: ${{ secrets.DBT_DATABRICKS_TOKEN }}
      DBT_DATABRICKS_USER: ${{ secrets.DBT_DATABRICKS_USERNAME }}
      DBT_TEST_USER_1: "[email protected]"
      DBT_TEST_USER_2: "[email protected]"
      DBT_TEST_USER_3: "[email protected]"
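    # The Datadog and Databricks values above come from Actions secrets configured on the
    # repository or organization.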
    steps:
      - name: Check out the repository
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      # explicitly check out the branch for the PR;
      # this is necessary for the `pull_request_target` event
      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.ref }}

      # the python version used here is not what is used in the tests themselves
      - name: Set up Python for dagger
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install -r dagger/requirements.txt

      - name: "Run tests for ${{ matrix.test }}"
        run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
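
  # Hand off to the shared dbt-release workflow to tag the ref, build the image, and
  # upload it to the internal archive.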
  invoke-reusable-workflow:
    name: "Create cloud release"
    needs: [run-integration-tests]
    uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@main"
    with:
      package_test_command: "${{ inputs.package_test_command }}"
      dbms_name: "spark"
      ref: "${{ inputs.ref }}"
      skip_tests: "${{ inputs.skip_tests }}"
    secrets: "inherit"