name: Run pr tests
on:
  pull_request:
    branches:
      - master
jobs:
  version-incremented: # checks that the version has been incremented in node, python, and deployment, and that they match
    name: Are version numbers incremented
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required to get the git tag list, due to the way Git works
      - uses: actions/setup-python@v3
        with:
          python-version: '3.10'
      - name: Check versions
        run: python release_utils/check-tag-incremented.py
        shell: sh
  lint-gh-actions:
    name: Lint GH Actions workflows
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: reviewdog/action-actionlint@v1
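      # actionlint statically checks the workflow files (expression syntax, runner labels,
      # action inputs, and embedded run scripts via shellcheck); the reviewdog wrapper
      # reports any findings back on the pull request.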
  plan-terraform: # https://learn.hashicorp.com/tutorials/terraform/github-actions?in=terraform/automation
    name: Run Terraform Validate and Plan
    runs-on: ubuntu-latest
    env:
      CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
      HCLOUD_TOKEN: ${{ secrets.HETZNER_API_TOKEN }}
      AWS_ACCESS_KEY_ID: ${{ secrets.TF_STATE_BUCKET_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.TF_STATE_BUCKET_KEY }}
      B2_APPLICATION_KEY_ID: ${{ secrets.B2_MASTER_KEY_ID }}
      B2_APPLICATION_KEY: ${{ secrets.B2_MASTER_KEY }}
      TF_VAR_pw_hash: ${{ secrets.NODE_ROOT_USER_PW_HASH }}
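    # Terraform reads any environment variable prefixed with TF_VAR_ as an input variable,
    # so TF_VAR_pw_hash populates the pw_hash variable. The remaining tokens are provider/
    # backend credentials (the AWS_* pair presumably belongs to the Terraform state bucket,
    # judging by the secret names).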
    steps:
      - uses: actions/checkout@v3
      - uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: "1.7.5"
      - id: fmt
        run: terraform fmt -check
      - id: init
        run: terraform init
        working-directory: deploy/terraform
      - id: validate
        run: terraform validate -no-color
        working-directory: deploy/terraform
      - id: plan
        run: terraform plan -no-color -input=false
        working-directory: deploy/terraform
        continue-on-error: true
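      # continue-on-error lets the job keep going when the plan fails, so the results can
      # still be posted to the PR; the "Terraform Plan Status" step below then fails the job.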
      - name: Update Pull Request
        uses: actions/github-script@v6
        if: github.event_name == 'pull_request'
        env:
          PLAN: "terraform\n${{ steps.plan.outputs.stdout }}"
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const output = `## Terraform Lint/Plan Results
            #### Terraform Format and Style 🖌\`${{ steps.fmt.outcome }}\`
            #### Terraform Initialization ⚙️\`${{ steps.init.outcome }}\`
            #### Terraform Plan 📖\`${{ steps.plan.outcome }}\`
            #### Terraform Validation 🤖\`${{ steps.validate.outcome }}\`
            <details><summary>Show Plan</summary>

            \`\`\`\n
            ${process.env.PLAN}
            \`\`\`

            </details>

            *Pushed by: @${{ github.actor }}, Action: \`${{ github.event_name }}\`*`;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: output
            })
      - name: Terraform Plan Status
        if: steps.plan.outcome == 'failure'
        run: exit 1
      # Fetch kubeconfig for lint-kustomization job:
      # How to fetch Terraform outputs from the CLI:
      # https://learn.hashicorp.com/tutorials/terraform/outputs
      - name: Get node IPv4 address # GITHUB DOESNT SUPPORT IPV6 FFS
        id: tf_ip_addr
        run: terraform output -raw ipv4_address
        working-directory: deploy/terraform
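      # The setup-terraform wrapper (installed above) captures the command's stdout, which is
      # what exposes the address as steps.tf_ip_addr.outputs.stdout for the later steps.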
      - uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: "1.7.5"
          terraform_wrapper: false
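      # Terraform is set up a second time with the wrapper disabled so that the next
      # `terraform output -raw` call writes only the raw value to stdout; the wrapper's
      # extra output would otherwise end up in the redirected key file.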
      - name: Get SSH key
        run: terraform output -raw ssh_key > "$HOME/id_ed25519"
        working-directory: deploy/terraform
      - name: chmod SSH key
        run: chmod 700 "$HOME/id_ed25519"
      - name: Download kubeconfig file
        run: |
          scp \
            -qo "StrictHostKeyChecking=no" \
            -i "$HOME/id_ed25519" \
            root@${{ steps.tf_ip_addr.outputs.stdout }}:/etc/rancher/k3s/k3s.yaml .
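      # StrictHostKeyChecking=no skips host key verification, since the runner has no
      # known_hosts entry for the freshly provisioned node; /etc/rancher/k3s/k3s.yaml is the
      # kubeconfig that k3s writes on the server by default.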
      - name: Set kubeconfig server URL
        uses: mikefarah/yq@master
        with:
          cmd: yq -i '.clusters[0].cluster.server = "https://${{ steps.tf_ip_addr.outputs.stdout }}:6443"' 'k3s.yaml'
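      # k3s writes its kubeconfig pointing at https://127.0.0.1:6443, so the server URL has
      # to be rewritten to the node's public address before the file can be used from the runner.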
      - name: Send kubeconfig to lint-kustomization job # https://stackoverflow.com/a/57877438
        uses: actions/upload-artifact@v3
        with:
          name: "kubeconfig"
          path: "k3s.yaml"
  lint-kustomization:
    name: Lint Kustomize/Kubernetes
    runs-on: ubuntu-latest
    needs: plan-terraform # Needs kubeconfig from terraform
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v3
        with:
          python-version: '3.10'
      - name: 'Build tmeit.se manifests'
        uses: stefanprodan/kube-tools@v1
        with:
          kustomize: '4.5.7'
          command: |-
            kustomize build deploy/kubernetes/tmeit-se -o tmeit-rendered-with-jobs.yaml
      - name: Remove Job manifests from test # Jobs are immutable, so applying one that already exists on the cluster without deleting it first makes the API throw an error
        run: python release_utils/remove-job-manifests.py
        shell: sh
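      # remove-job-manifests.py presumably strips the Job documents out of
      # tmeit-rendered-with-jobs.yaml and writes the result as tmeit-rendered.yaml, the file
      # the kubectl dry-run below applies (an assumption based on the filenames, not on the script itself).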
      - name: Receive kubeconfig from plan-terraform job # https://stackoverflow.com/a/57877438
        uses: actions/download-artifact@v3
        with:
          name: "kubeconfig"
      - name: Copy kubeconfig to correct location
        run: mkdir "$HOME/.kube/" && mv k3s.yaml "$HOME/.kube/config"
      - name: 'lint prod manifests' # NOTE: the latest version of kubectl can be found at https://dl.k8s.io/release/stable.txt
        run: |
          curl -LO "https://dl.k8s.io/release/v1.25.3/bin/linux/amd64/kubectl"
          curl -LO "https://dl.k8s.io/release/v1.25.3/bin/linux/amd64/kubectl.sha256"
          echo "$(cat kubectl.sha256)  kubectl" | sha256sum --check
          chmod +x kubectl
          ./kubectl apply --dry-run=server --server-side=true --force-conflicts -f tmeit-rendered.yaml
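      # --dry-run=server submits the rendered manifests to the live API server for full
      # validation without persisting anything, which is why this job needs the real
      # kubeconfig from plan-terraform.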
  test-containerfile: # Build containers to test that they build properly
    name: Build app containers
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build containers
        uses: redhat-actions/buildah-build@v2
        with:
          image: tmeit-app
          context: '.'
          containerfiles: |-
            containerfiles/tmeit-app.Containerfile
            containerfiles/tmeit-run-migrations.Containerfile
            containerfiles/tmeit-worker.Containerfile
            containerfiles/db-backup-agent.Containerfile
  test-app: # Run app tests
    name: Test app
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build test container
        uses: redhat-actions/buildah-build@v2
        with:
          image: tmeit-app-test
          context: '.'
          containerfiles: containerfiles/tmeit-app-test.Containerfile
      - name: run test
        run: podman run tmeit-app-test
  test-migrations: # Run migrations and insert dummy data; also make sure no further migrations are needed
    name: Test migrations
    runs-on: ubuntu-latest
    services:
      postgres: # Run a postgres container alongside the test to be tested against (the name of this key is also the hostname)
        image: postgres:15beta1
        env:
          POSTGRES_USER: tmeit_backend
          POSTGRES_PASSWORD: test # Make sure this matches back/db_migrations/scripts/pr-migration-tests.sh
          POSTGRES_DB: tmeit_backend
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
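        # The runner waits for this health check to pass before starting the job's steps, and
        # containers attached to the job's Docker network (as in the docker run step below)
        # can reach the service at the hostname "postgres".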
    steps:
      - uses: actions/checkout@v4
      - name: Build tester container
        run: |
          docker build . -f containerfiles/create-test-db.Containerfile -t create-test-db
          docker build . -f containerfiles/test-migrations.Containerfile -t test-migrations
      - name: Test migrations
        run: docker run --network ${{ job.container.network }} test-migrations
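    # job.container.network is the Docker network the runner created for this job's service
    # containers; attaching the test container to it lets it reach the database at
    # hostname "postgres" with the credentials configured above.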