[E2E] - Job Status #1492

Merged 3 commits on Aug 28, 2024

30 changes: 15 additions & 15 deletions test/e2e/integration/apply.bats
@@ -22,51 +22,51 @@ setup() {
}

teardown() {
-[[ -n "$BATS_TEST_COMPLETED" ]] || touch ${BATS_PARENT_TMPNAME}.skip
+[[ -n $BATS_TEST_COMPLETED ]] || touch ${BATS_PARENT_TMPNAME}.skip
}

@test "We should be able to approve the terraform configuration" {
runit "kubectl -n ${APP_NAMESPACE} annotate configurations.terraform.appvia.io ${RESOURCE_NAME} \"terraform.appvia.io/apply\"=true --overwrite"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a job created in the terraform-system ready to run" {
labels="terraform.appvia.io/configuration=${RESOURCE_NAME},terraform.appvia.io/stage=apply"

-retry 50 "kubectl -n ${NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | grep -q Complete"
-[[ "$status" -eq 0 ]]
+retry 50 "kubectl -n ${NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | egrep -q '(Complete|SuccessCriteriaMet)'"
+[[ $status -eq 0 ]]
runit "kubectl -n ${NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].status' | grep -q True"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a job created in the application namespace ready to watch apply" {
labels="terraform.appvia.io/configuration=${RESOURCE_NAME},terraform.appvia.io/stage=apply"

-retry 10 "kubectl -n ${APP_NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | grep -q Complete"
-[[ "$status" -eq 0 ]]
+retry 10 "kubectl -n ${APP_NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | egrep -q '(Complete|SuccessCriteriaMet)'"
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].status' | grep -q True"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a configuration sucessfully applied" {
runit "kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME} -o json" "jq -r '.status.conditions[3].name' | grep -q 'Terraform Apply'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME} -o json" "jq -r '.status.conditions[3].reason' | grep -q 'Ready'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME} -o json" "jq -r '.status.conditions[3].status' | grep -q 'True'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME} -o json" "jq -r '.status.conditions[3].type' | grep -q 'TerraformApply'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should be able to view the logs from the apply" {
POD=$(kubectl -n ${APP_NAMESPACE} get pod -l terraform.appvia.io/configuration=${RESOURCE_NAME} -l terraform.appvia.io/stage=apply -o json | jq -r '.items[0].metadata.name')
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} logs ${POD} 2>&1" "grep -q '\[build\] completed'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have the terraform version on the status" {
runit "kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME} -o json" "jq -r '.status.terraformVersion' | grep -q '[0-9]\+\.[0-9]\+\.[0-9]\+'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}
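
The updated assertions accept either `Complete` or `SuccessCriteriaMet` as the first Job condition, presumably because newer Kubernetes releases can surface a `SuccessCriteriaMet` condition on a succeeded Job ahead of `Complete`. The repeated check could be factored into a small helper; the following is a minimal sketch only, not code from this PR — `job_succeeded` is a hypothetical name, and it assumes `kubectl` and `jq` are on the PATH and that the label selector matches a single Job:

```bash
#!/usr/bin/env bash
# Hypothetical helper (not part of this PR): succeed once the first condition on
# the first Job matching the label selector is either Complete or SuccessCriteriaMet,
# mirroring the pattern the tests above switch to.
job_succeeded() {
  local namespace="$1" labels="$2"

  kubectl -n "${namespace}" get job -l "${labels}" -o json \
    | jq -r '.items[0].status.conditions[0].type' \
    | grep -E -q '(Complete|SuccessCriteriaMet)'
}

# Example usage (variables as used by the suite above):
# job_succeeded "${NAMESPACE}" "terraform.appvia.io/configuration=${RESOURCE_NAME},terraform.appvia.io/stage=apply"
```
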
56 changes: 28 additions & 28 deletions test/e2e/integration/checkov.bats
@@ -19,22 +19,22 @@ load ../lib/helper

setup() {
[[ ! -f ${BATS_PARENT_TMPNAME}.skip ]] || skip "skip remaining tests"
-[[ "${CLOUD}" == "aws" ]] || skip "skip for non-aws cloud"
+[[ ${CLOUD} == "aws" ]] || skip "skip for non-aws cloud"
}

teardown() {
-[[ -n "$BATS_TEST_COMPLETED" ]] || touch ${BATS_PARENT_TMPNAME}.skip
+[[ -n $BATS_TEST_COMPLETED ]] || touch ${BATS_PARENT_TMPNAME}.skip
}

@test "We should clean the environment before running the tests" {
runit "kubectl -n ${APP_NAMESPACE} delete po --all"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} delete jobs --all"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
}

@test "We should be able to create a checkov policy to block resources" {
-cat <<EOF > ${BATS_TMPDIR}/resource.yaml
+cat << EOF > ${BATS_TMPDIR}/resource.yaml
apiVersion: terraform.appvia.io/v1alpha1
kind: Policy
metadata:
@@ -46,13 +46,13 @@ spec:
skipChecks: []
EOF
runit "kubectl apply -f ${BATS_TMPDIR}/resource.yaml"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl get policies.terraform.appvia.io denied"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should be create a configuration to verify the policy blocks" {
-cat <<EOF >> ${BATS_TMPDIR}/resource.yaml
+cat << EOF >> ${BATS_TMPDIR}/resource.yaml
---
apiVersion: terraform.appvia.io/v1alpha1
kind: Configuration
@@ -67,74 +67,74 @@ spec:
bucket_name: ${RESOURCE_NAME}-co
EOF
runit "kubectl -n ${APP_NAMESPACE} apply -f ${BATS_TMPDIR}/resource.yaml"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME}-co"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a job created in the terraform namespace running the plan" {
labels="terraform.appvia.io/configuration=${RESOURCE_NAME}-co,terraform.appvia.io/stage=plan"

runit "kubectl -n ${NAMESPACE} get job -l ${labels}"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a watcher job created in the configuration namespace" {
labels="terraform.appvia.io/configuration=${RESOURCE_NAME}-co,terraform.appvia.io/stage=plan"

runit "kubectl -n ${APP_NAMESPACE} get job -l ${labels}"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a completed watcher job in the application namespace" {
labels="terraform.appvia.io/configuration=${RESOURCE_NAME}-co,terraform.appvia.io/stage=plan"

-retry 10 "kubectl -n ${APP_NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | grep -q Complete"
-[[ "$status" -eq 0 ]]
+retry 30 "kubectl -n ${APP_NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | egrep -q '(Complete|SuccessCriteriaMet)'"
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].status' | grep -q True"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a secret containing the evaluation in the terraform namespace" {
UUID=$(kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME}-co -o json | jq -r '.metadata.uid')
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${NAMESPACE} get secret policy-${UUID}"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${NAMESPACE} get secret policy-${UUID} -o json" "jq -r '.data.results_json.json'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a copy the policy report in the configuration namespace" {
UUID=$(kubectl -n ${APP_NAMESPACE} get configuration ${RESOURCE_NAME}-co -o json | jq -r '.metadata.uid')
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get secret policy-${UUID}"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get secret policy-${UUID} -o json" "jq -r '.data.results_json.json'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should see the conditions indicate the configuration failed policy" {
POD=$(kubectl -n ${APP_NAMESPACE} get pod -l terraform.appvia.io/configuration=${RESOURCE_NAME}-co -l terraform.appvia.io/stage=plan -o json | jq -r '.items[0].metadata.name')
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]

runit "kubectl -n ${APP_NAMESPACE} logs ${POD} 2>&1" "grep -q 'EVALUATING AGAINST SECURITY POLICY'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} logs ${POD} 2>&1" "grep -q 'FAILED for resource'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a event indicating the configuration failed policy" {
expected="Configuration has failed security policy, refusing to continue"

runit "kubectl -n ${APP_NAMESPACE} get event" "grep -q 'Configuration has failed security policy, refusing to continue'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should be able to cleanup the environment" {
runit "kubectl -n ${APP_NAMESPACE} delete configuration ${RESOURCE_NAME}-co"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} delete po --all"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl delete policy denied"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}
22 changes: 11 additions & 11 deletions test/e2e/integration/cloud/aws/costs.bats
@@ -22,15 +22,15 @@ setup() {
}

teardown() {
-[[ -n "$BATS_TEST_COMPLETED" ]] || touch ${BATS_PARENT_TMPNAME}.skip
+[[ -n $BATS_TEST_COMPLETED ]] || touch ${BATS_PARENT_TMPNAME}.skip
}

@test "We should have a token for the infracost integration" {
-[[ -n "$INFRACOST_TOKEN" ]] || touch ${BATS_PARENT_TMPNAME}.skip
+[[ -n $INFRACOST_TOKEN ]] || touch ${BATS_PARENT_TMPNAME}.skip
}

@test "We should be able to create a configuration which costs money on aws" {
-cat <<EOF > ${BATS_TMPDIR}/resource.yml
+cat << EOF > ${BATS_TMPDIR}/resource.yml
---
apiVersion: terraform.appvia.io/v1alpha1
kind: Configuration
@@ -46,33 +46,33 @@ spec:
instance_type: m5.8xlarge
EOF
runit "kubectl -n ${APP_NAMESPACE} apply -f ${BATS_TMPDIR}/resource.yml"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have a job created in the terraform-system running the plan" {
labels="terraform.appvia.io/configuration=compute,terraform.appvia.io/stage=plan"

runit "kubectl -n ${NAMESPACE} get job -l ${labels}"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should see the terraform plan complete successfully" {
labels="terraform.appvia.io/configuration=compute,terraform.appvia.io/stage=plan"

-retry 50 "kubectl -n ${NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | grep -q Complete"
-[[ "$status" -eq 0 ]]
+retry 50 "kubectl -n ${NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | egrep -q '(Complete|SuccessCriteriaMet)'"
+[[ $status -eq 0 ]]
runit "kubectl -n ${NAMESPACE} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].status' | grep -q True"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should the predicted costs available on the status" {
runit "kubectl -n ${APP_NAMESPACE} get configuration compute -o json" "jq -r '.status.costs.enabled' | grep -q 'true'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${APP_NAMESPACE} get configuration compute -o json" "jq -r '.status.costs.monthly' | grep -q '^\$[0-9\.]*'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should be able to destroy the aws configuration for costs" {
runit "kubectl -n ${APP_NAMESPACE} delete -f ${BATS_TMPDIR}/resource.yml --wait=false"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}
46 changes: 23 additions & 23 deletions test/e2e/integration/contexts.bats
@@ -23,16 +23,16 @@ setup() {
}

teardown() {
-[[ -n "$BATS_TEST_COMPLETED" ]] || touch ${BATS_PARENT_TMPNAME}.skip
+[[ -n $BATS_TEST_COMPLETED ]] || touch ${BATS_PARENT_TMPNAME}.skip
}

@test "We should be able to retrieve a list of contexts" {
runit "kubectl get contexts.terraform.appvia.io"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
}

@test "We should be able to create a configuration context" {
-cat <<EOF > ${BATS_TMPDIR}/resource.yaml
+cat << EOF > ${BATS_TMPDIR}/resource.yaml
apiVersion: terraform.appvia.io/v1alpha1
kind: Context
metadata:
@@ -44,13 +44,13 @@ spec:
value: true
EOF
runit "kubectl apply -f ${BATS_TMPDIR}/resource.yaml"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
runit "kubectl get contexts.terraform.appvia.io default"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
}

@test "We should be able to update the terranetes context" {
-cat <<EOF > ${BATS_TMPDIR}/resource.yaml
+cat << EOF > ${BATS_TMPDIR}/resource.yaml
apiVersion: terraform.appvia.io/v1alpha1
kind: Context
metadata:
@@ -65,15 +65,15 @@ spec:
value: true
EOF
runit "kubectl apply -f ${BATS_TMPDIR}/resource.yaml"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
runit "kubectl get contexts.terraform.appvia.io default"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
}

@test "We should be able to use a context within a configuration" {
namespace="context-check"

-cat <<EOF > ${BATS_TMPDIR}/resource.yaml
+cat << EOF > ${BATS_TMPDIR}/resource.yaml
apiVersion: v1
kind: Namespace
metadata:
@@ -83,13 +83,13 @@ metadata:
EOF

runit "kubectl apply -f ${BATS_TMPDIR}/resource.yaml"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
runit "kubectl -n ${namespace} delete job --all"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
runit "kubectl -n ${namespace} delete pod --all"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]

-cat <<EOF > ${BATS_TMPDIR}/resource.yaml
+cat << EOF > ${BATS_TMPDIR}/resource.yaml
---
apiVersion: terraform.appvia.io/v1alpha1
kind: Configuration
@@ -109,44 +109,44 @@ spec:
unused: $(date +"%s")
EOF
runit "kubectl -n ${namespace} apply -f ${BATS_TMPDIR}/resource.yaml"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should have successfully ran the terraform plan" {
namespace="context-check"
labels="terraform.appvia.io/configuration=${RESOURCE_NAME},terraform.appvia.io/stage=plan"

-retry 30 "kubectl -n ${namespace} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | grep -q Complete"
-[[ "$status" -eq 0 ]]
+retry 30 "kubectl -n ${namespace} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].type' | egrep -q '(Complete|SuccessCriteriaMet)'"
+[[ $status -eq 0 ]]
runit "kubectl -n ${namespace} get job -l ${labels} -o json" "jq -r '.items[0].status.conditions[0].status' | grep -q True"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should be able to view the logs and see expected output" {
namespace="context-check"
labels="-l terraform.appvia.io/configuration=${RESOURCE_NAME} -l terraform.appvia.io/stage=plan"

POD=$(kubectl -n ${namespace} get pod ${labels} -o json | jq -r '.items[0].metadata.name')
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
runit "kubectl -n ${namespace} logs ${POD} 2>&1" "grep -q 'We expect to see this'"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should not be able to delete context when in use" {
runit "kubectl delete contexts.terraform.appvia.io default 2>&1" "grep -q 'resource in use by configuration'"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
}

@test "We should be able to delete a configuration" {
namespace="context-check"

runit "kubectl -n ${namespace} delete configuration ${RESOURCE_NAME}"
-[[ "$status" -eq 0 ]]
+[[ $status -eq 0 ]]
}

@test "We should be able to delete the configuration context" {
runit "kubectl delete contexts.terraform.appvia.io default"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
runit "kubectl get contexts.terraform.appvia.io default 2>&1" "grep -q NotFound"
-[[ "${status}" -eq 0 ]]
+[[ ${status} -eq 0 ]]
}
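
For reference, a hedged sketch of how these suites might be exercised locally with the `bats` CLI. The invocation and the example values below are assumptions, not taken from this repository's Makefile or CI; only the variable names (`NAMESPACE`, `APP_NAMESPACE`, `RESOURCE_NAME`, `CLOUD`, `INFRACOST_TOKEN`) come from the tests above.

```bash
# Assumed local run — adjust values to your cluster; the real CI wiring may differ.
export NAMESPACE="terraform-system"   # controller namespace referenced by the tests
export APP_NAMESPACE="apps"           # example application namespace (assumption)
export RESOURCE_NAME="bucket"         # example configuration name (assumption)
export CLOUD="aws"                    # checkov and costs suites skip unless aws
# export INFRACOST_TOKEN="..."        # only needed for the costs suite

bats test/e2e/integration/apply.bats
bats test/e2e/integration/checkov.bats
bats test/e2e/integration/contexts.bats
bats test/e2e/integration/cloud/aws/costs.bats
```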