Test backport apm - DO NOT MERGE #13991

Closed · wants to merge 13 commits

The diff is too large to display in full; only the first 3000 changed files are loaded.

27 changes: 25 additions & 2 deletions .buildkite/hooks/post-checkout
@@ -1,5 +1,18 @@
#!/bin/bash

# ******************************* WARNING ******************************************
# This post-checkout hook is not the same as in the rest of the repositories (e.g. beats)
# because some steps in this pipeline (in PR context) take a very long time and we
# want to make sure that THE SAME COMMIT FROM TARGET BRANCH gets merged in every
# pipeline step. Otherwise, HEAD (target branch) may have changed in the meantime
# and therefore, some steps (e.g. sonarqube) may end up testing a different commit.
#
# Running builds from branches or tags (out of PR context) maintains the same behavior
# as in the rest of the repositories.
#
# Reference: https://github.com/elastic/integrations/pull/10397
# **********************************************************************************

set -euo pipefail

checkout_merge() {
@@ -13,8 +26,15 @@ checkout_merge() {
fi

git fetch -v origin "${target_branch}"
git checkout FETCH_HEAD
echo "Current branch: $(git rev-parse --abbrev-ref HEAD)"
if [[ ${REPOSITORY_TARGET_BRANCH_COMMIT} == "" ]]; then
git checkout FETCH_HEAD
echo "Current branch: $(git rev-parse --abbrev-ref HEAD)"
else
# Use the same commit from target branch as in the other steps.
echo "Retrieved commit from meta-data: ${REPOSITORY_TARGET_BRANCH_COMMIT}"
git checkout "${REPOSITORY_TARGET_BRANCH_COMMIT}"
echo "Current branch: $(git rev-parse --abbrev-ref HEAD)"
fi

# create a temporary branch to merge the PR with the target branch
git checkout -b ${merge_branch}
@@ -44,6 +64,9 @@ PR_COMMIT="${BUILDKITE_COMMIT}"
PR_ID=${BUILDKITE_PULL_REQUEST}
MERGE_BRANCH="pr_merge_${PR_ID}"

# This meta-data field is populated in the pre-command hook
REPOSITORY_TARGET_BRANCH_COMMIT=$(buildkite-agent meta-data get "REPOSITORY_TARGET_BRANCH_COMMIT" --default "")

checkout_merge "${TARGET_BRANCH}" "${PR_COMMIT}" "${MERGE_BRANCH}"

echo "Commit information"
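
Taken together, the pre-command and post-checkout changes pin every PR step to a single target-branch commit through Buildkite meta-data. A minimal sketch of that handshake, reusing only commands that appear in this diff:

    # First step only (pre-command hook): record the target branch HEAD once.
    git fetch -v origin "${BUILDKITE_PULL_REQUEST_BASE_BRANCH}"
    commit_main=$(git rev-parse --verify FETCH_HEAD)
    buildkite-agent meta-data set "REPOSITORY_TARGET_BRANCH_COMMIT" "${commit_main}"

    # Every later step (post-checkout hook): reuse that exact commit if it was recorded.
    REPOSITORY_TARGET_BRANCH_COMMIT=$(buildkite-agent meta-data get "REPOSITORY_TARGET_BRANCH_COMMIT" --default "")
    if [[ -n "${REPOSITORY_TARGET_BRANCH_COMMIT}" ]]; then
      git checkout "${REPOSITORY_TARGET_BRANCH_COMMIT}"
    else
      git checkout FETCH_HEAD
    fi

This keeps long-running steps (such as sonarqube) merging against the same commit even if the target branch moves while the build is in flight.
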
65 changes: 28 additions & 37 deletions .buildkite/hooks/pre-command
@@ -11,6 +11,8 @@ git config --global core.pager 'cat'
export UPLOAD_SAFE_LOGS=${UPLOAD_SAFE_LOGS:-"0"}
export SERVERLESS=${SERVERLESS:-"false"}
export STACK_VERSION=${STACK_VERSION:-""}
export ELASTIC_SUBSCRIPTION=${ELASTIC_SUBSCRIPTION:-""}
export STACK_LOGSDB_ENABLED=${STACK_LOGSDB_ENABLED:-"false"}
export FORCE_CHECK_ALL=${FORCE_CHECK_ALL:-"false"}
export PUBLISH_COVERAGE_REPORTS=${PUBLISH_COVERAGE_REPORTS:-"false"}

@@ -29,11 +31,7 @@ export TMP_FOLDER_TEMPLATE="${TMP_FOLDER_TEMPLATE_BASE}.XXXXXXXXX"
REPO_BUILD_TAG="${REPO_NAME}/$(buildkite_pr_branch_build_id)"
export REPO_BUILD_TAG

AWS_SERVICE_ACCOUNT_SECRET_PATH=kv/ci-shared/platform-ingest/aws_account_auth
PRIVATE_CI_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account

BUILDKITE_API_TOKEN_PATH=kv/ci-shared/platform-ingest/buildkite_token
GITHUB_TOKEN_VAULT_PATH=kv/ci-shared/platform-ingest/github_token

EC_TOKEN_PATH=kv/ci-shared/platform-ingest/platform-ingest-ec-qa
EC_DATA_PATH=secret/ci/elastic-integrations/ec_data
@@ -42,6 +40,8 @@ EC_DATA_PATH=secret/ci/elastic-integrations/ec_data
export ENVIRONMENT="ci"
export REPO="${REPO_NAME}"

export JOB_GCS_BUCKET_INTERNAL="ecosystem-ci-internal"

branch_name_label() {
local branch="$1"

@@ -74,14 +74,25 @@ if [ -n "${ELASTIC_PACKAGE_LINKS_FILE_PATH+x}" ]; then
export ELASTIC_PACKAGE_LINKS_FILE_PATH=${BASE_DIR}/${ELASTIC_PACKAGE_LINKS_FILE_PATH}
fi

if [[ ( "${BUILDKITE_PIPELINE_SLUG}" =~ ^(integrations|integrations-test-stack)$ ) && "${BUILDKITE_STEP_KEY}" == "reference-target-branch" ]]; then
# Get the commit from target branch in the first step (reference-target-branch).
# This step MUST be the first one and not run in parallel with any other step to ensure
# that there is just one value for this variable
if is_pr ; then
git fetch -v origin ${BUILDKITE_PULL_REQUEST_BASE_BRANCH}
commit_main=$(git rev-parse --verify FETCH_HEAD)
buildkite-agent meta-data set "REPOSITORY_TARGET_BRANCH_COMMIT" "${commit_main}"
fi
fi

if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations-publish" ]]; then
if [[ "${BUILDKITE_STEP_KEY}" == "trigger-publish" ]]; then
BUILDKITE_API_TOKEN=$(retry 5 vault kv get -field buildkite_token ${BUILDKITE_API_TOKEN_PATH})
export BUILDKITE_API_TOKEN
fi
fi

if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations" ]]; then
if [[ "${BUILDKITE_PIPELINE_SLUG}" =~ ^(integrations|integrations-test-stack)$ ]]; then
if [[ "${BUILDKITE_STEP_KEY}" == "test-integrations" ]]; then
BUILDKITE_API_TOKEN=$(retry 5 vault kv get -field buildkite_token "${BUILDKITE_API_TOKEN_PATH}")
export BUILDKITE_API_TOKEN
@@ -90,43 +101,18 @@ if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations" ]]; then
if [[ "${BUILDKITE_STEP_KEY}" == "publish-benchmarks" ]]; then
BUILDKITE_API_TOKEN=$(retry 5 vault kv get -field buildkite_token "${BUILDKITE_API_TOKEN_PATH}")
export BUILDKITE_API_TOKEN
GITHUB_TOKEN=$(retry 5 vault kv get -field token "${GITHUB_TOKEN_VAULT_PATH}")
GITHUB_TOKEN=$VAULT_GITHUB_TOKEN
export GITHUB_TOKEN
fi

if [[ "${BUILDKITE_STEP_KEY}" =~ ^test-integrations- ]]; then
ELASTIC_PACKAGE_AWS_SECRET_KEY=$(retry 5 vault kv get -field secret_key "${AWS_SERVICE_ACCOUNT_SECRET_PATH}")
export ELASTIC_PACKAGE_AWS_SECRET_KEY
ELASTIC_PACKAGE_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key "${AWS_SERVICE_ACCOUNT_SECRET_PATH}")
export ELASTIC_PACKAGE_AWS_ACCESS_KEY

PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field plaintext -format=json "${PRIVATE_CI_GCS_CREDENTIALS_PATH}")
export PRIVATE_CI_GCS_CREDENTIALS_SECRET
export JOB_GCS_BUCKET_INTERNAL="ingest-buildkite-ci"

# Environment variables required by the service deployer
export AWS_SECRET_ACCESS_KEY=${ELASTIC_PACKAGE_AWS_SECRET_KEY}
export AWS_ACCESS_KEY_ID=${ELASTIC_PACKAGE_AWS_ACCESS_KEY}

BUILDKITE_API_TOKEN=$(retry 5 vault kv get -field buildkite_token "${BUILDKITE_API_TOKEN_PATH}")
export BUILDKITE_API_TOKEN
fi
fi

if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations-serverless" ]]; then
if [[ "${BUILDKITE_STEP_KEY}" == "test-integrations-serverless-project" ]]; then
ELASTIC_PACKAGE_AWS_SECRET_KEY=$(retry 5 vault kv get -field secret_key "${AWS_SERVICE_ACCOUNT_SECRET_PATH}")
export ELASTIC_PACKAGE_AWS_SECRET_KEY
ELASTIC_PACKAGE_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key "${AWS_SERVICE_ACCOUNT_SECRET_PATH}")
export ELASTIC_PACKAGE_AWS_ACCESS_KEY

PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field plaintext -format=json "${PRIVATE_CI_GCS_CREDENTIALS_PATH}")
export PRIVATE_CI_GCS_CREDENTIALS_SECRET
export JOB_GCS_BUCKET_INTERNAL="ingest-buildkite-ci"

# Environment variables required by the service deployer
export AWS_SECRET_ACCESS_KEY=${ELASTIC_PACKAGE_AWS_SECRET_KEY}
export AWS_ACCESS_KEY_ID=${ELASTIC_PACKAGE_AWS_ACCESS_KEY}

BUILDKITE_API_TOKEN=$(retry 5 vault kv get -field buildkite_token "${BUILDKITE_API_TOKEN_PATH}")
export BUILDKITE_API_TOKEN
@@ -142,11 +128,16 @@ fi

if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations-backport" ]]; then
if [[ "$BUILDKITE_STEP_KEY" == "create-backport-branch" ]]; then
GITHUB_USERNAME_SECRET=$(retry 5 vault kv get -field username ${GITHUB_TOKEN_VAULT_PATH})
export GITHUB_USERNAME_SECRET
GITHUB_EMAIL_SECRET=$(retry 5 vault kv get -field email ${GITHUB_TOKEN_VAULT_PATH})
export GITHUB_EMAIL_SECRET
GITHUB_TOKEN=$(retry 5 vault kv get -field token "${GITHUB_TOKEN_VAULT_PATH}")
export GITHUB_TOKEN
GITHUB_USERNAME="elastic-vault-github-plugin-prod"
GITHUB_EMAIL="[email protected]"
GITHUB_TOKEN=$VAULT_GITHUB_TOKEN
export GITHUB_TOKEN GITHUB_EMAIL GITHUB_USERNAME
fi
fi

if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations" || "$BUILDKITE_PIPELINE_SLUG" == "integrations-serverless" ]]; then
if [[ "$BUILDKITE_STEP_KEY" == "report-failed-tests" ]]; then
export GITHUB_TOKEN="${VAULT_GITHUB_TOKEN}"
fi
fi

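The pre-command changes also reduce the number of Vault reads: the GitHub token now comes from the agent-provided VAULT_GITHUB_TOKEN variable, and secrets are only exported for the pipeline/step combinations that need them. A hedged sketch of that gating pattern, using the slugs and step keys from this diff:

    # Hand the GitHub token only to steps that actually need it.
    if [[ "${BUILDKITE_PIPELINE_SLUG}" =~ ^(integrations|integrations-test-stack)$ ]]; then
      if [[ "${BUILDKITE_STEP_KEY}" == "publish-benchmarks" ]]; then
        GITHUB_TOKEN="${VAULT_GITHUB_TOKEN}"  # provided in the agent environment, no extra Vault call
        export GITHUB_TOKEN
      fi
    fi
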
12 changes: 1 addition & 11 deletions .buildkite/hooks/pre-exit
@@ -4,13 +4,9 @@ source .buildkite/scripts/common.sh

set -euo pipefail

if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations" ]]; then
if [[ "$BUILDKITE_PIPELINE_SLUG" =~ ^(integrations|integrations-test-stack)$ ]]; then
# FIXME: update condition depending on the pipeline steps triggered
if [[ "$BUILDKITE_STEP_KEY" =~ ^test-integrations- ]]; then
unset ELASTIC_PACKAGE_AWS_ACCESS_KEY
unset ELASTIC_PACKAGE_AWS_SECRET_KEY
unset AWS_ACCESS_KEY_ID
unset AWS_SECRET_ACCESS_KEY

# Ensure that kind cluster is deleted
delete_kind_cluster
@@ -25,10 +21,6 @@ fi

if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations-serverless" ]]; then
if [[ "$BUILDKITE_STEP_KEY" == "test-integrations-serverless-project" ]]; then
unset ELASTIC_PACKAGE_AWS_ACCESS_KEY
unset ELASTIC_PACKAGE_AWS_SECRET_KEY
unset AWS_ACCESS_KEY_ID
unset AWS_SECRET_ACCESS_KEY

# Ensure that kind cluster is deleted
delete_kind_cluster
@@ -44,8 +36,6 @@ fi
unset_secrets
cleanup

google_cloud_logout_active_account

if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations-backport" && "$BUILDKITE_STEP_KEY" == "create-backport-branch" ]]; then
cd "${WORKSPACE}"
git config remote.origin.url "https://github.com/elastic/integrations.git"
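
The pre-exit hook keeps its cleanup duties but drops the AWS credential unsets and the Google Cloud logout, since pre-command no longer exports those credentials. A condensed sketch of what remains, using the helpers sourced from .buildkite/scripts/common.sh:

    source .buildkite/scripts/common.sh

    if [[ "${BUILDKITE_PIPELINE_SLUG}" =~ ^(integrations|integrations-test-stack)$ ]]; then
      if [[ "${BUILDKITE_STEP_KEY}" =~ ^test-integrations- ]]; then
        delete_kind_cluster  # make sure no kind cluster leaks between jobs
      fi
    fi

    unset_secrets
    cleanup
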
4 changes: 4 additions & 0 deletions .buildkite/pipeline.backport.yml
@@ -4,6 +4,8 @@ name: "integrations-backport"

env:
YQ_VERSION: 'v4.35.2'
# Agent images used in pipeline steps
LINUX_AGENT_IMAGE: "golang:${GO_VERSION}"

steps:

@@ -49,6 +51,8 @@ steps:
- label: "Creating the backport branch"
key: "create-backport-branch"
command: ".buildkite/scripts/backport_branch.sh"
agents:
image: "${LINUX_AGENT_IMAGE}"
depends_on:
- step: "input-variables"
allow_failure: false
79 changes: 72 additions & 7 deletions .buildkite/pipeline.schedule-daily.yml
@@ -2,7 +2,7 @@
name: integrations-schedule-daily

env:
SETUP_GVM_VERSION: "v0.5.1"
SETUP_GVM_VERSION: "v0.5.2"
LINUX_AGENT_IMAGE: "golang:${GO_VERSION}"

# The pipeline is triggered by the scheduler every day
@@ -20,26 +20,85 @@ steps:
build:
env:
SERVERLESS: "false"
SKIP_PUBLISHING: "true"
FORCE_CHECK_ALL: "true"
# STACK_VERSION: 7.17-SNAPSHOT # Using 7.17.19 till https://github.com/elastic/fleet-server/issues/3435 is solved.
STACK_VERSION: 7.17.19
STACK_VERSION: 7.17.28
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_7_BRANCH') == "true"

- label: "Check integrations local stacks - Stack Version v8.14"
- label: "Check integrations local stacks - Stack Version v8.19"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
SKIP_PUBLISHING: "true"
FORCE_CHECK_ALL: "true"
STACK_VERSION: 8.14.0-SNAPSHOT
STACK_VERSION: 8.19.0-SNAPSHOT
PUBLISH_COVERAGE_REPORTS: "true"
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_8_BRANCH') == "true"

- label: "Check integrations local stacks - Stack Version v8.19 - LogsDB"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
FORCE_CHECK_ALL: "true"
STACK_VERSION: 8.19.0-SNAPSHOT
STACK_LOGSDB_ENABLED: "true"
PUBLISH_COVERAGE_REPORTS: "false"
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_8_BRANCH') == "true"

- label: "Check integrations local stacks and basic subscription and LogsDB"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
FORCE_CHECK_ALL: "true"
PUBLISH_COVERAGE_REPORTS: "false"
ELASTIC_SUBSCRIPTION: "basic"
STACK_LOGSDB_ENABLED: "true"
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_BASIC_SUBSCRIPTION') == "true"

- label: "Check integrations local stacks and basic subscription"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
FORCE_CHECK_ALL: "true"
PUBLISH_COVERAGE_REPORTS: "false"
ELASTIC_SUBSCRIPTION: "basic"
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_BASIC_SUBSCRIPTION') == "true"

- label: "Check integrations local stacks - Stack Version v9.1"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
FORCE_CHECK_ALL: "true"
STACK_VERSION: 9.1.0-SNAPSHOT
PUBLISH_COVERAGE_REPORTS: "false"
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_9_BRANCH') == "true"

- label: "Check integrations in serverless - project: Observability"
key: "trigger-integrations-serverless-obs"
@@ -50,6 +109,8 @@
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_SERVERLESS') == "true"

- label: "Check integrations in serverless - project: Security"
key: "trigger-integrations-serverless-security"
@@ -60,10 +121,14 @@
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('TEST_PACKAGES_SERVERLESS') == "true"

- label: ":package: Publish missing packages"
key: "trigger-integrations-publish"
trigger: "integrations-publish"
depends_on:
- step: "check"
allow_failure: false
if: |
build.env('REPUBLISH_PACKAGES') == "true"
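
Each scheduled entry above triggers the integrations pipeline with a different environment (stack version, LogsDB, subscription), gated by build-level variables set by the scheduler. The YAML if: build.env(...) conditions behave roughly like this shell check (a sketch for illustration only, not code from the repository):

    # Equivalent gating logic, expressed in shell for illustration.
    if [[ "${TEST_PACKAGES_8_BRANCH:-false}" == "true" ]]; then
      echo "would trigger: integrations with STACK_VERSION=8.19.0-SNAPSHOT"
    fi
    if [[ "${TEST_PACKAGES_BASIC_SUBSCRIPTION:-false}" == "true" ]]; then
      echo "would trigger: integrations with ELASTIC_SUBSCRIPTION=basic"
    fi
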
42 changes: 42 additions & 0 deletions .buildkite/pipeline.schedule-weekly.yml
@@ -0,0 +1,42 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json
name: integrations-schedule-weekly

env:
SETUP_GVM_VERSION: "v0.5.2"
LINUX_AGENT_IMAGE: "golang:${GO_VERSION}"

# The pipeline is triggered by the scheduler every week
steps:
- label: ":white_check_mark: Check go sources"
key: "check"
command: ".buildkite/scripts/check_sources.sh"
agents:
image: "${LINUX_AGENT_IMAGE}"
cpu: "8"
memory: "4G"

- label: "Check integrations local stacks and Elastic Agent Ubuntu docker - Stack Version v8.19"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
FORCE_CHECK_ALL: "true"
STACK_VERSION: 8.19.0-SNAPSHOT
PUBLISH_COVERAGE_REPORTS: "false"
ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI: "true"
depends_on:
- step: "check"
allow_failure: false

- label: "Check integrations local stacks and non-wolfi images for Elastic Agent - Stack Version v9.1"
trigger: "integrations"
build:
env:
SERVERLESS: "false"
FORCE_CHECK_ALL: "true"
STACK_VERSION: 9.1.0-SNAPSHOT
PUBLISH_COVERAGE_REPORTS: "false"
ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI: "true"
depends_on:
- step: "check"
allow_failure: false