Release to Cloud #14
# What?
#
# Tags and releases an arbitrary ref, then uploads it to an internal archive for
# further processing.
#
# How?
#
# After checking out and testing the provided ref, the image is built and uploaded.
#
# When?
#
# Manual trigger.
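
# Example dispatch with the GitHub CLI (illustrative only; assumes `gh` is
# authenticated against this repository):
#
#   gh workflow run "Release internal patch" \
#     --ref main \
#     -f version_number=1.0.0b1 \
#     -f ref=main \
#     -f package_test_command='python -c "import dbt.adapters.spark"'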
name: "Release internal patch" | |
on: | |
workflow_dispatch: | |
inputs: | |
version_number: | |
description: "The release version number (i.e. 1.0.0b1)" | |
type: string | |
required: true | |
ref: | |
description: "The ref (sha or branch name) to use" | |
type: string | |
default: "main" | |
required: true | |
package_test_command: | |
description: "Package test command" | |
type: string | |
default: "python -c \"import dbt.adapters.spark\"" | |
required: true | |
defaults: | |
run: | |
shell: "bash" | |
env: | |
PYTHON_TARGET_VERSION: 3.8 | |

jobs:
  run-unit-tests:
    name: unit test / python ${{ matrix.python-version }}

    runs-on: ubuntu-latest
    timeout-minutes: 10

    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8"]

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install python dependencies
        run: |
          sudo apt-get update
          sudo apt-get install libsasl2-dev
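          # libsasl2-dev supplies the SASL headers needed to compile the Python
          # SASL/thrift bindings from source (assumed requirement of the Spark
          # thrift connection dependencies installed below)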
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install -r requirements.txt
          python -m pip install -r dev-requirements.txt
          python -m pip install -e .

      - name: Run unit tests
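        # the `--csv` flag below is provided by the pytest-csv plugin, assumed
        # to be pinned in dev-requirements.txt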
        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit

  run-integration-tests:
    name: ${{ matrix.test }}
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        test:
          - "apache_spark"
          - "spark_session"
          - "databricks_sql_endpoint"
          - "databricks_cluster"
          - "databricks_http_cluster"

    env:
      DBT_INVOCATION_ENV: github-actions
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}
      DBT_DATABRICKS_CLUSTER_NAME: ${{ secrets.DBT_DATABRICKS_CLUSTER_NAME }}
      DBT_DATABRICKS_HOST_NAME: ${{ secrets.DBT_DATABRICKS_HOST_NAME }}
      DBT_DATABRICKS_ENDPOINT: ${{ secrets.DBT_DATABRICKS_ENDPOINT }}
      DBT_DATABRICKS_TOKEN: ${{ secrets.DBT_DATABRICKS_TOKEN }}
      DBT_DATABRICKS_USER: ${{ secrets.DBT_DATABRICKS_USERNAME }}
      DBT_TEST_USER_1: "[email protected]"
      DBT_TEST_USER_2: "[email protected]"
      DBT_TEST_USER_3: "[email protected]"

    steps:
      - name: Check out the repository
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v3
        with:
          persist-credentials: false

      # explicitly check out the PR head branch;
      # this is necessary for the `pull_request_target` event
      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v3
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.ref }}

      # the python version used here is not what is used in the tests themselves
      - name: Set up Python for dagger
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install -r dagger/requirements.txt

      - name: Run tests for ${{ matrix.test }}
        run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
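
      # To run a single profile locally (illustrative; assumes the relevant
      # DBT_DATABRICKS_* / DBT_TEST_USER_* variables are exported in your shell):
      #   python -m pip install -r dagger/requirements.txt
      #   python dagger/run_dbt_spark_tests.py --profile apache_spark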

  invoke-reusable-workflow:
    name: "Build and Release Internally"
    needs: [run-unit-tests, run-integration-tests]
    uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@mp/finish_internal_workflow"
    with:
      version_number: "${{ inputs.version_number }}"
      package_test_command: "${{ inputs.package_test_command }}"
      dbms_name: "spark"
      ref: "${{ inputs.ref }}"
    secrets: "inherit"