Merge pull request #182 from cagov/writing_building_footprints_to_s3 #67

Workflow file for this run

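# Build the project documentation (mkdocs site plus dbt docs for Snowflake and
# BigQuery) and publish it to GitHub Pages on every push to main.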
name: docs
on:
  push:
    branches:
      - main
permissions:
  contents: write
env:
  DBT_PROFILES_DIR: ci
  PRIVATE_KEY: ${{ secrets.SNOWFLAKE_PRIVATE_KEY }}
  SNOWFLAKE_PRIVATE_KEY_PATH: /tmp/private_key.p8
  SNOWFLAKE_USER: GITHUB_ACTIONS_SVC_USER_DEV
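# DBT_PROFILES_DIR points dbt at the profiles.yml kept in the ci/ directory; the
# Snowflake private key is supplied via PRIVATE_KEY and written out to
# SNOWFLAKE_PRIVATE_KEY_PATH by the "Set up private key" step below.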
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - uses: snok/install-poetry@v1
        with:
          virtualenvs-create: false
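      # With virtualenvs-create disabled, `poetry install` (below) puts the
      # dependencies into the runner's Python, so `dbt` and `mkdocs` are
      # available directly on the PATH in later steps.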
      - id: auth
        name: Authenticate to Google Cloud
        uses: google-github-actions/auth@v1
        with:
          # The credentials here can read metadata, data, and create jobs (execute
          # queries), which allows it to compile the dbt project and generate docs.
          credentials_json: ${{ secrets.GOOGLE_CREDENTIALS }}
          export_environment_variables: true
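      # Cache the .cache directory between runs, keyed on the git ref
      # (assumption: this is where mkdocs plugins store intermediate artifacts).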
      - uses: actions/cache@v2
        with:
          key: ${{ github.ref }}
          path: .cache
      - name: Install dependencies
        run: |
          poetry install
      # TODO: once we are on dbt-snowflake 1.5, no need to pipe to a file, we can
      # just use $SNOWFLAKE_PRIVATE_KEY
      - name: Set up private key
        run: echo "$PRIVATE_KEY" > $SNOWFLAKE_PRIVATE_KEY_PATH
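      # Compile each dbt project and copy its generated docs into the mkdocs
      # docs/ tree so they are published alongside the rest of the site.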
      - name: Build dbt docs
        run: |
          # Generate snowflake dbt docs
          dbt deps --project-dir=transform
          dbt docs generate --project-dir=transform
          cp -r transform/target docs/dbt_docs_snowflake
          # Generate bigquery dbt docs
          dbt deps --project-dir=transform-bigquery
          dbt docs generate --project-dir=transform-bigquery
          cp -r transform-bigquery/target docs/dbt_docs_bigquery
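      # `mkdocs gh-deploy --force` builds the site and force-pushes it to the
      # gh-pages branch, which is why the workflow needs `contents: write`.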
      - name: Deploy 🚀
        run: mkdocs gh-deploy --force