e2e #1732

Workflow file for this run

name: e2e
on:
  workflow_dispatch:
    inputs:
      TARGET:
        description: 'Target for profile'
        required: true
        default: 'dev'
        type: choice
        options:
          - 'dev'
          - 'prod'
      DATASET:
        description: 'BigQuery dataset name for "dev" profile'
        required: true
        default: 'z_dbt_wizard'
  schedule:
    # Runs "At minute 0 past every 6th hour." (see https://crontab.guru/#0_*/6_*_*_*)
    - cron: '0 */6 * * *'
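# Two jobs: "package" builds the extension into a darwin-x64 .vsix on Ubuntu and caches it;
# "run-e2e-tests" restores that build on macOS and runs the end-to-end suite against BigQuery.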
jobs:
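  # Build the extension and package it as a darwin-x64 .vsix named after the commit SHA.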
  package:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v3
      - name: set up node
        uses: actions/setup-node@v3
        with:
          node-version: '19.x'
      - name: build and test
        run: |
          npm install
          npm run build
      - name: set environment variable with file name
        run: echo "FILE_NAME=${{ github.sha }}-darwin-x64.vsix" >> $GITHUB_ENV
      - name: delete libraries for other platforms
        run: |
          find server/node_modules/@fivetrandevelopers/zetasql/lib/zetasql -maxdepth 1 -type f ! -name 'remote_server' -exec rm -f {} \;
      - name: package to vsix file
        run: npx @vscode/vsce package -o ${FILE_NAME} --target darwin-x64 --githubBranch main
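      # Cache the packaged .vsix under a key containing the commit SHA so the run-e2e-tests job can restore this exact build.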
      - name: create cache
        uses: actions/cache@v3
        with:
          path: ./${{ env.FILE_NAME }}
          key: ${{ env.FILE_NAME }}
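  # Restore the .vsix on macOS, prepare dbt and BigQuery credentials, and run the e2e tests against the analytics project.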
  run-e2e-tests:
    needs: package
    runs-on: macos-latest
    steps:
      - name: set environment variable with file name
        run: echo "FILE_NAME=${{ github.sha }}-darwin-x64.vsix" >> $GITHUB_ENV
      - name: checkout
        uses: actions/checkout@v3
      - name: checkout repository with models
        uses: actions/checkout@v3
        with:
          repository: fivetran/analytics
          path: analytics
          token: ${{ secrets.ANALYTICS_REPO_ACCESS_TOKEN }}
      - name: load vsix from cache
        uses: actions/cache@v3
        id: cache
        with:
          path: ./${{ env.FILE_NAME }}
          key: ${{ env.FILE_NAME }}
      - name: fail if cache not hit
        if: steps.cache.outputs.cache-hit != 'true'
        run: exit 1
      - uses: actions/setup-node@v3
        with:
          node-version: '19.x'
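      # Compile the shared common package and the e2e test sources with tsc.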
      - name: build tests
        run: | # ignore postinstall script and install dependencies for e2e
          npm install --ignore-scripts
          cd common
          npx tsc --sourceMap false --project tsconfig.json
          cd ../e2e
          npm install
          npx tsc --sourceMap false --project tsconfig.json
      - uses: actions/setup-python@v4
        id: setup-python
        with:
          python-version: '3.9.12'
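      # Write the BigQuery service-account keyfile, a dbt profiles.yml with dev and prod outputs,
      # and a VS Code workspace file for the analytics repo.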
      - name: prepare profile config
        run: |
          mkdir -p ~/.dbt/
          echo "$BQ_ANALYTICS_ACCOUNT" > ~/.dbt/analytics.json
          user=$(whoami)
          homedir=$(eval echo "~${user}")
          keyFilePath=${homedir}/.dbt/analytics.json
          echo ${keyFilePath}
          private_key=$(echo ${BQ_ANALYTICS_ACCOUNT} | jq '.private_key')
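          # Extract the service-account fields from the JSON secret with jq and assign them in a single read.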
          read project_id private_key_id client_id client_email auth_uri token_uri auth_provider_x509_cert_url client_x509_cert_url < <(echo $(echo ${BQ_ANALYTICS_ACCOUNT} | jq -r '.project_id, .private_key_id, .client_id, .client_email, .auth_uri, .token_uri, .auth_provider_x509_cert_url, .client_x509_cert_url'))
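          # Scheduled runs have no workflow_dispatch inputs, so fall back to the same defaults declared above.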
if [[ -z "${TARGET}" ]]; then
target_value=dev
else
target_value="${TARGET}"
fi
if [[ -z "${DATASET}" ]]; then
dataset_value=z_dbt_wizard
else
dataset_value="${DATASET}"
fi
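          # Render ~/.dbt/profiles.yml: dev writes to the selected dataset, prod writes to the transforms dataset.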
          cat <<EOT > ~/.dbt/profiles.yml
          fivetran:
            outputs:
              dev:
                type: bigquery
                method: service-account
                project: ${project_id}
                keyfile: ${keyFilePath}
                dataset: ${dataset_value}
                threads: 4
              prod:
                type: bigquery
                method: service-account
                project: ${project_id}
                keyfile: ${keyFilePath}
                dataset: transforms
                threads: 4
            target: ${target_value}
          EOT
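          # Point the workspace at the dbt_ft_prod folder and at the Python interpreter installed by setup-python.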
          cat <<EOT > analytics/dbt_ft_prod.code-workspace
          {
            "folders": [
              {
                "path": "dbt_ft_prod"
              }
            ],
            "settings": {
              "python.defaultInterpreterPath": "${PYTHON_INSTALL_LOC}"
            }
          }
          EOT
        env:
          BQ_ANALYTICS_ACCOUNT: ${{ secrets.BQ_ANALYTICS_ACCOUNT }}
          TARGET: ${{ inputs.TARGET }}
          DATASET: ${{ inputs.DATASET }}
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
      - name: show prepared files
        run: |
          cd ~/.dbt/
          pwd
          ls -la
      - name: install dbt
        run: ${PYTHON_INSTALL_LOC} -m pip install "dbt-bigquery>=1.3.0,<1.4.0"
        env:
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
      - name: show info
        run: |
          which python
          python --version
          python -m pip --version
          python -m pip list | grep dbt
          which dbt
          dbt --version
          node -v
          echo ${PYTHON_INSTALL_LOC}
          ${PYTHON_INSTALL_LOC} --version
        env:
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
      - name: build and test
        run: |
          cd analytics/dbt_ft_prod
          dbt deps
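      # Unzip the cached .vsix; the test runner loads the extension from the extracted "extension" folder.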
      - run: unzip ${{ env.FILE_NAME }} -d e2e-tests
      - name: run e2e tests
        run: node e2e/out/runners/runFtTest $(pwd)/e2e-tests/extension
      - name: upload logs
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: all_logs
          path: |
            ./.vscode-test/user-data/logs/**/*Wizard for dbt Core (TM).log
            ./e2e/out/diagnostics.txt
            ./e2e/out/log.txt
            ./analytics/dbt_ft_prod/logs/*