diff --git a/.github/actions/deb-delivery/action.yml b/.github/actions/deb-delivery/action.yml index 46b6c5ec189..1c6a3850ba0 100644 --- a/.github/actions/deb-delivery/action.yml +++ b/.github/actions/deb-delivery/action.yml @@ -22,7 +22,7 @@ inputs: release_type: description: "Type of release (hotfix, release)" required: true - release_cloud: + is_cloud: description: "Release context (cloud or not cloud)" required: true @@ -49,12 +49,12 @@ runs: echo "[DEBUG] - Version: ${{ inputs.version }}" echo "[DEBUG] - Distrib: ${{ inputs.distrib }}" echo "[DEBUG] - module_name: ${{ inputs.module_name }}" - echo "[DEBUG] - release_cloud: ${{ inputs.release_cloud }}" + echo "[DEBUG] - is_cloud: ${{ inputs.is_cloud }}" echo "[DEBUG] - release_type: ${{ inputs.release_type }}" echo "[DEBUG] - stability: ${{ inputs.stability }}" # Make sure all required inputs are NOT empty - if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.version }} || -z ${{ inputs.release_cloud }} || -z ${{ inputs.release_type }} ]]; then + if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.version }} || -z "${{ inputs.is_cloud }}" ]]; then echo "Some mandatory inputs are empty, please check the logs." 
exit 1 fi diff --git a/.github/actions/delivery/action.yml b/.github/actions/delivery/action.yml index 8cbca5c8073..663b1f35549 100644 --- a/.github/actions/delivery/action.yml +++ b/.github/actions/delivery/action.yml @@ -22,7 +22,7 @@ inputs: release_type: description: "Type of release (hotfix, release)" required: true - release_cloud: + is_cloud: description: "Release context (cloud or not cloud)" required: true @@ -63,12 +63,12 @@ runs: echo "[DEBUG] - Major version: ${{ inputs.major_version }}" echo "[DEBUG] - Distrib: ${{ inputs.distrib }}" echo "[DEBUG] - module_name: ${{ inputs.module_name }}" - echo "[DEBUG] - release_cloud: ${{ inputs.release_cloud }}" + echo "[DEBUG] - is_cloud: ${{ inputs.is_cloud }}" echo "[DEBUG] - release_type: ${{ inputs.release_type }}" echo "[DEBUG] - stability: ${{ inputs.stability }}" # Make sure all required inputs are NOT empty - if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.major_version }} || -z ${{ inputs.release_cloud }} || -z ${{ inputs.release_type }} ]]; then + if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.major_version }} || -z "${{ inputs.is_cloud }}" ]]; then echo "Some mandatory inputs are empty, please check the logs." 
exit 1 fi @@ -87,32 +87,32 @@ runs: mv "$FILE" "$ARCH" done - # Build upload target path based on release_cloud and release_type values + # Build upload target path based on is_cloud and release_type values # if cloud + hotfix or cloud + release, deliver to internal testing- # if cloud + develop, delivery to internal unstable # if non-cloud, delivery to onprem testing or unstable # CLOUD + HOTFIX + REPO STANDARD INTERNAL OR CLOUD + RELEASE + REPO STANDARD INTERNAL - if [[ ${{ inputs.release_cloud }} -eq 1 && ( ${{ inputs.release_type }} == "hotfix" || ${{ inputs.release_type }} == "release" ) ]]; then + if [[ "${{ inputs.is_cloud }}" == "true" && ( "${{ inputs.release_type }}" == "hotfix" || "${{ inputs.release_type }}" == "release" ) ]]; then echo "[DEBUG] : Release cloud + ${{ inputs.release_type }}, using rpm-standard-internal." ROOT_REPO_PATHS="rpm-standard-internal" UPLOAD_REPO_PATH="${{ inputs.major_version }}/${{ inputs.distrib }}/${{ inputs.stability }}-${{ inputs.release_type }}/$ARCH/${{ inputs.module_name }}/" # CLOUD + NOT HOTFIX OR CLOUD + NOT RELEASE + REPO STANDARD INTERNAL - elif [[ ${{ inputs.release_cloud }} -eq 1 && ( ${{ inputs.release_type }} != "hotfix" && ${{ inputs.release_type }} != "release" ) ]]; then + elif [[ "${{ inputs.is_cloud }}" == "true" && ( "${{ inputs.release_type }}" != "hotfix" && "${{ inputs.release_type }}" != "release" ) ]]; then echo "[DEBUG] : Release cloud + NOT ${{ inputs.release_type }}, using rpm-standard-internal." ROOT_REPO_PATHS="rpm-standard-internal" UPLOAD_REPO_PATH="${{ inputs.major_version }}/${{ inputs.distrib }}/${{ inputs.stability }}-${{ inputs.release_type }}/$ARCH/${{ inputs.module_name }}/" # NON-CLOUD + (HOTFIX OR RELEASE) + REPO STANDARD - elif [[ ${{ inputs.release_cloud }} -eq 0 ]]; then + elif [[ "${{ inputs.is_cloud }}" == "false" ]]; then echo "[DEBUG] : NOT Release cloud + ${{ inputs.release_type }}, using rpm-standard." 
ROOT_REPO_PATHS="rpm-standard" UPLOAD_REPO_PATH="${{ inputs.major_version }}/${{ inputs.distrib }}/${{ inputs.stability }}/$ARCH/${{ inputs.module_name }}/" # NOT VALID, DO NOT DELIVER else - echo "::error:: Invalid combination of release_type [${{ inputs.release_type }}] and release_cloud [${{ inputs.release_cloud }}]" + echo "::error:: Invalid combination of release_type [${{ inputs.release_type }}] and is_cloud [${{ inputs.is_cloud }}]" exit 1 fi @@ -141,12 +141,12 @@ runs: echo "[DEBUG] - Major version: ${{ inputs.major_version }}" echo "[DEBUG] - Distrib: ${{ inputs.distrib }}" echo "[DEBUG] - module_name: ${{ inputs.module_name }}" - echo "[DEBUG] - release_cloud: ${{ inputs.release_cloud }}" + echo "[DEBUG] - is_cloud: ${{ inputs.is_cloud }}" echo "[DEBUG] - release_type: ${{ inputs.release_type }}" echo "[DEBUG] - stability: ${{ inputs.stability }}" # Make sure all required inputs are NOT empty - if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.major_version }} || -z ${{ inputs.release_cloud }} || -z ${{ inputs.release_type }} ]]; then + if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.major_version }} || -z "${{ inputs.is_cloud }}" ]]; then echo "Some mandatory inputs are empty, please check the logs." 
exit 1 fi diff --git a/.github/actions/promote-to-stable/action.yml b/.github/actions/promote-to-stable/action.yml index 4432aee5663..feb817f2cd3 100644 --- a/.github/actions/promote-to-stable/action.yml +++ b/.github/actions/promote-to-stable/action.yml @@ -22,7 +22,7 @@ inputs: release_type: description: "Type of release (hotfix, release)" required: true - release_cloud: + is_cloud: description: "Release context (cloud or not cloud)" required: true @@ -42,7 +42,7 @@ runs: # DEBUG echo "[DEBUG] - Major version: ${{ inputs.major_version }}" echo "[DEBUG] - Distrib: ${{ inputs.distrib }}" - echo "[DEBUG] - release_cloud: ${{ inputs.release_cloud }}" + echo "[DEBUG] - is_cloud: ${{ inputs.is_cloud }}" echo "[DEBUG] - release_type: ${{ inputs.release_type }}" # Cloud specific promote @@ -56,15 +56,15 @@ runs: # Search for testing packages candidate for promote for ARCH in "noarch" "x86_64"; do - # Build search path based on release_cloud and release_type values + # Build search path based on is_cloud and release_type values # if cloud, search in testing- path # if non-cloud, search in the testing usual path - if [[ ${{ inputs.release_cloud }} -eq 1 && ${{ inputs.release_type }} == "hotfix" ]] || [[ ${{ inputs.release_cloud }} -eq 1 && ${{ inputs.release_type }} == "release" ]]; then + if [[ "${{ inputs.is_cloud }}" == "true" && "${{ inputs.release_type }}" == "hotfix" ]] || [[ "${{ inputs.is_cloud }}" == "true" && "${{ inputs.release_type }}" == "release" ]]; then SEARCH_REPO_PATH="${{ inputs.major_version }}/${{ inputs.distrib }}/testing-${{ inputs.release_type }}/$ARCH/${{ inputs.module_name }}" - elif [[ ${{ inputs.release_cloud }} -eq 0 ]]; then + elif [[ "${{ inputs.is_cloud }}" == "false" ]]; then SEARCH_REPO_PATH="${{ inputs.major_version }}/${{ inputs.distrib }}/testing/$ARCH/${{ inputs.module_name }}" else - echo "Invalid combination of release_type and release_cloud" + echo "Invalid combination of release_type and is_cloud"; exit 1 fi echo "[DEBUG] - Get path of
$ARCH testing artifacts to promote to stable." diff --git a/.github/actions/rpm-delivery/action.yml b/.github/actions/rpm-delivery/action.yml index 3174c753300..b1fbc79e2d7 100644 --- a/.github/actions/rpm-delivery/action.yml +++ b/.github/actions/rpm-delivery/action.yml @@ -22,7 +22,7 @@ inputs: release_type: description: "Type of release (hotfix, release)" required: true - release_cloud: + is_cloud: description: "Release context (cloud or not cloud)" required: true @@ -61,12 +61,12 @@ runs: echo "[DEBUG] - Version: ${{ inputs.version }}" echo "[DEBUG] - Distrib: ${{ inputs.distrib }}" echo "[DEBUG] - module_name: ${{ inputs.module_name }}" - echo "[DEBUG] - release_cloud: ${{ inputs.release_cloud }}" + echo "[DEBUG] - is_cloud: ${{ inputs.is_cloud }}" echo "[DEBUG] - release_type: ${{ inputs.release_type }}" echo "[DEBUG] - stability: ${{ inputs.stability }}" # Make sure all required inputs are NOT empty - if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.version }} || -z ${{ inputs.release_cloud }} || -z ${{ inputs.release_type }} ]]; then + if [[ -z "${{ inputs.module_name }}" || -z "${{ inputs.distrib }}" || -z ${{ inputs.stability }} || -z ${{ inputs.version }} || -z "${{ inputs.is_cloud }}" ]]; then echo "Some mandatory inputs are empty, please check the logs." 
exit 1 fi @@ -85,32 +85,32 @@ runs: mv "$FILE" "$ARCH" done - # Build upload target path based on release_cloud and release_type values + # Build upload target path based on is_cloud and release_type values # if cloud + hotfix or cloud + release, deliver to internal testing- # if cloud + develop, delivery to internal unstable # if non-cloud, delivery to onprem testing or unstable # CLOUD + HOTFIX + REPO STANDARD INTERNAL OR CLOUD + RELEASE + REPO STANDARD INTERNAL - if [[ ${{ inputs.release_cloud }} -eq 1 ]] && ([[ ${{ inputs.release_type }} == "hotfix" ]] || [[ ${{ inputs.release_type }} == "release" ]]); then + if [[ "${{ inputs.is_cloud }}" == "true" ]] && ([[ "${{ inputs.release_type }}" == "hotfix" ]] || [[ "${{ inputs.release_type }}" == "release" ]]); then echo "[DEBUG] : Release cloud + ${{ inputs.release_type }}, using rpm-standard-internal." ROOT_REPO_PATHS="rpm-standard-internal" UPLOAD_REPO_PATH="${{ inputs.version }}/${{ inputs.distrib }}/${{ inputs.stability }}-${{ inputs.release_type }}/$ARCH/${{ inputs.module_name }}/" # CLOUD + NOT HOTFIX OR CLOUD + NOT RELEASE + REPO STANDARD INTERNAL - elif [[ ${{ inputs.release_cloud }} -eq 1 ]] && ([[ ${{ inputs.release_type }} != "hotfix" ]] || [[ ${{ inputs.release_type }} != "release" ]]); then + elif [[ "${{ inputs.is_cloud }}" == "true" ]] && ([[ "${{ inputs.release_type }}" != "hotfix" ]] && [[ "${{ inputs.release_type }}" != "release" ]]); then echo "[DEBUG] : Release cloud + NOT ${{ inputs.release_type }}, using rpm-standard-internal." ROOT_REPO_PATHS="rpm-standard-internal" UPLOAD_REPO_PATH="${{ inputs.version }}/${{ inputs.distrib }}/${{ inputs.stability }}-${{ inputs.release_type }}/$ARCH/${{ inputs.module_name }}/" # NON-CLOUD + (HOTFIX OR RELEASE) + REPO STANDARD - elif [[ ${{ inputs.release_cloud }} -eq 0 ]]; then + elif [[ "${{ inputs.is_cloud }}" == "false" ]]; then echo "[DEBUG] : NOT Release cloud + ${{ inputs.release_type }}, using rpm-standard."
ROOT_REPO_PATHS="rpm-standard" UPLOAD_REPO_PATH="${{ inputs.version }}/${{ inputs.distrib }}/${{ inputs.stability }}/$ARCH/${{ inputs.module_name }}/" # ANYTHING ELSE else - echo "::error:: Invalid combination of release_type [${{ inputs.release_type }}] and release_cloud [${{ inputs.release_cloud }}]" + echo "::error:: Invalid combination of release_type [${{ inputs.release_type }}] and is_cloud [${{ inputs.is_cloud }}]" exit 1 fi diff --git a/.github/workflows/centreon-collect.yml b/.github/workflows/centreon-collect.yml index e966efa46c4..87a831f5324 100644 --- a/.github/workflows/centreon-collect.yml +++ b/.github/workflows/centreon-collect.yml @@ -47,14 +47,14 @@ on: - "!**/test/**" jobs: - get-version: - uses: ./.github/workflows/get-version.yml + get-environment: + uses: ./.github/workflows/get-environment.yml with: version_file: CMakeLists.txt unit-test: - needs: [get-version] - if: ${{ ! contains(fromJson('["stable"]'), needs.get-version.outputs.stability) }} + needs: [get-environment] + if: ${{ ! contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) }} runs-on: [self-hosted, collect] strategy: @@ -71,8 +71,8 @@ jobs: uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 with: registry: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }} - username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} - name: Test ${{ matrix.image }} uses: ./.github/actions/runner-docker @@ -80,25 +80,25 @@ jobs: registry_url: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }} script_name: /src/.github/scripts/collect-unit-tests image_name: centreon-collect-${{ matrix.image }} - image_version: ${{ needs.get-version.outputs.img_version }} + image_version: ${{ needs.get-environment.outputs.img_version }} package: - needs: [get-version] - if: ${{ ! 
contains(fromJson('["stable"]'), needs.get-version.outputs.stability) }} + needs: [get-environment] + if: ${{ ! contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) }} uses: ./.github/workflows/package-collect.yml with: - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ needs.get-version.outputs.minor_version }} - img_version: ${{ needs.get-version.outputs.img_version }} - release: ${{ needs.get-version.outputs.release }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} + img_version: ${{ needs.get-environment.outputs.img_version }} + release: ${{ needs.get-environment.outputs.release }} commit_hash: ${{ github.sha }} - stability: ${{ needs.get-version.outputs.stability }} + stability: ${{ needs.get-environment.outputs.stability }} secrets: inherit deliver-sources: runs-on: [self-hosted, common] - needs: [get-version, package] - if: ${{ contains(fromJson('["stable"]'), needs.get-version.outputs.stability) && github.event_name != 'workflow_dispatch' }} + needs: [get-environment, package] + if: ${{ contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) && github.event_name != 'workflow_dispatch' }} steps: - name: Checkout sources @@ -112,14 +112,13 @@ jobs: bucket_directory: centreon-collect module_directory: centreon-collect module_name: centreon-collect - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ needs.get-version.outputs.minor_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} token_download_centreon_com: ${{ secrets.TOKEN_DOWNLOAD_CENTREON_COM }} deliver-rpm: - if: ${{ contains(fromJson('["testing", "stable"]'), needs.get-version.outputs.stability) }} - needs: [get-version, package] - environment: ${{ needs.get-version.outputs.environment }} + if: ${{ 
contains(fromJson('["testing", "stable"]'), needs.get-environment.outputs.stability) }} + needs: [get-environment, package] runs-on: [self-hosted, common] strategy: matrix: @@ -140,17 +139,16 @@ jobs: with: module_name: collect distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.run_id }}-${{ github.sha }}-rpm-centreon-collect-${{ matrix.distrib }}-${{ matrix.arch }}-${{ github.head_ref || github.ref_name }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} deliver-deb: - if: ${{ contains(fromJson('["testing", "stable"]'), needs.get-version.outputs.stability) }} - needs: [get-version, package] - environment: ${{ needs.get-version.outputs.environment }} + if: ${{ contains(fromJson('["testing", "stable"]'), needs.get-environment.outputs.stability) }} + needs: [get-environment, package] runs-on: [self-hosted, common] strategy: matrix: @@ -171,16 +169,16 @@ jobs: with: module_name: collect distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.run_id }}-${{ github.sha }}-deb-centreon-collect-${{ matrix.distrib }}-${{ matrix.arch }}-${{ github.head_ref || github.ref_name }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ 
needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} promote: - needs: [get-version] - if: ${{ contains(fromJson('["stable"]'), needs.get-version.outputs.stability) && github.event_name != 'workflow_dispatch' }} + needs: [get-environment] + if: ${{ contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) && github.event_name != 'workflow_dispatch' }} runs-on: [self-hosted, common] strategy: matrix: @@ -196,8 +194,8 @@ jobs: artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} module_name: collect distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} - stability: ${{ needs.get-version.outputs.stability }} + major_version: ${{ needs.get-environment.outputs.major_version }} + stability: ${{ needs.get-environment.outputs.stability }} github_ref_name: ${{ github.ref_name }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} diff --git a/.github/workflows/check-status.yml b/.github/workflows/check-status.yml index 36799865754..b56f2253b55 100644 --- a/.github/workflows/check-status.yml +++ b/.github/workflows/check-status.yml @@ -39,7 +39,7 @@ jobs: script: | await exec.exec("sleep 20s"); - for (let i = 0; i < 60; i++) { + for (let i = 0; i < 120; i++) { const failure = []; const cancelled = []; const pending = []; @@ -47,7 +47,7 @@ jobs: const result = await github.rest.checks.listSuitesForRef({ owner: context.repo.owner, repo: context.repo.repo, - ref: "${{ github.event.pull_request.head.sha }}" + ref: "${{ github.head_ref }}" }); result.data.check_suites.forEach(({ app: { slug }, conclusion, id}) => { if (slug === 'github-actions') { @@ -86,8 +86,10 @@ jobs: core.summary.addList(failedCheckRuns); 
core.summary.write() - core.setFailed(`${failure.length} workflow(s) failed`); - return; + if (failedCheckRuns.length > 0) { + core.setFailed(`${failedCheckRuns.length} job(s) failed`); + return; + } } if (pending.length === 1) { diff --git a/.github/workflows/docker-builder.yml b/.github/workflows/docker-builder.yml index 5f1ed5aea0a..522860f28cd 100644 --- a/.github/workflows/docker-builder.yml +++ b/.github/workflows/docker-builder.yml @@ -17,13 +17,13 @@ on: - '.github/docker/Dockerfile.centreon-collect-*' jobs: - get-version: - uses: ./.github/workflows/get-version.yml + get-environment: + uses: ./.github/workflows/get-environment.yml with: version_file: CMakeLists.txt create-and-push-docker: - needs: [get-version] + needs: [get-environment] strategy: fail-fast: false @@ -32,39 +32,39 @@ jobs: - runner: collect dockerfile: centreon-collect-alma8 image: centreon-collect-alma8 - tag: ${{ needs.get-version.outputs.img_version }} + tag: ${{ needs.get-environment.outputs.img_version }} - runner: collect dockerfile: centreon-collect-alma9 image: centreon-collect-alma9 - tag: ${{ needs.get-version.outputs.img_version }} + tag: ${{ needs.get-environment.outputs.img_version }} - runner: collect dockerfile: centreon-collect-alma9-test image: centreon-collect-alma9-test - tag: ${{ needs.get-version.outputs.test_img_version }} + tag: ${{ needs.get-environment.outputs.test_img_version }} - runner: collect dockerfile: centreon-collect-mysql-alma9 image: centreon-collect-mysql-alma9 - tag: ${{ needs.get-version.outputs.img_version }} + tag: ${{ needs.get-environment.outputs.img_version }} - runner: collect dockerfile: centreon-collect-mysql-alma9-test image: centreon-collect-mysql-alma9-test - tag: ${{ needs.get-version.outputs.test_img_version }} + tag: ${{ needs.get-environment.outputs.test_img_version }} - runner: collect dockerfile: centreon-collect-debian-bullseye image: centreon-collect-debian-bullseye - tag: ${{ needs.get-version.outputs.img_version }} + tag: ${{ 
needs.get-environment.outputs.img_version }} - runner: collect dockerfile: centreon-collect-debian-bullseye-test image: centreon-collect-debian-bullseye-test - tag: ${{ needs.get-version.outputs.test_img_version }} + tag: ${{ needs.get-environment.outputs.test_img_version }} - runner: collect-arm64 dockerfile: centreon-collect-debian-bullseye image: centreon-collect-debian-bullseye-arm64 - tag: ${{ needs.get-version.outputs.img_version }} + tag: ${{ needs.get-environment.outputs.img_version }} - runner: collect-arm64 dockerfile: centreon-collect-debian-bullseye-test image: centreon-collect-debian-bullseye-arm64-test - tag: ${{ needs.get-version.outputs.test_img_version }} + tag: ${{ needs.get-environment.outputs.test_img_version }} runs-on: ${{ fromJson(format('["self-hosted", "{0}"]', matrix.runner)) }} @@ -78,15 +78,15 @@ jobs: uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 with: registry: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }} - username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PUSH_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PUSH_TOKEN }} - name: Login to Proxy Registry uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 with: registry: ${{ vars.DOCKER_PROXY_REGISTRY_URL }} - username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} - uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3.3.0 diff --git a/.github/workflows/get-environment.yml b/.github/workflows/get-environment.yml new file mode 100644 index 00000000000..d88b4128337 --- /dev/null +++ b/.github/workflows/get-environment.yml @@ -0,0 +1,290 @@ +on: + workflow_call: + inputs: + version_file: + required: false + type: string + default: CMakeLists.txt + outputs: + latest_major_version: + 
description: "latest major version" + value: ${{ jobs.get-environment.outputs.latest_major_version }} + is_cloud: + description: "context of release (cloud or not cloud)" + value: ${{ jobs.get-environment.outputs.is_cloud }} + major_version: + description: "major version" + value: ${{ jobs.get-environment.outputs.major_version }} + minor_version: + description: "minor version" + value: ${{ jobs.get-environment.outputs.minor_version }} + release: + description: "release" + value: ${{ jobs.get-environment.outputs.release }} + stability: + description: "branch stability (stable, testing, unstable, canary)" + value: ${{ jobs.get-environment.outputs.stability }} + target_stability: + description: "Final target branch stability (stable, testing, unstable, canary or not defined if not a pull request)" + value: ${{ jobs.get-environment.outputs.target_stability }} + release_type: + description: "type of release (hotfix, release or not defined if not a release)" + value: ${{ jobs.get-environment.outputs.release_type }} + is_targeting_feature_branch: + description: "if it is a PR, check if targeting a feature branch" + value: ${{ jobs.get-environment.outputs.is_targeting_feature_branch }} + img_version: + description: "docker image version (vcpkg checksum)" + value: ${{ jobs.get-environment.outputs.img_version }} + test_img_version: + description: "test docker image version (checksum of database sql, script and dockerfiles)" + value: ${{ jobs.get-environment.outputs.test_img_version }} + gorgone_docker_version: + description: "md5 of gorgone dockerfile" + value: ${{ jobs.get-environment.outputs.gorgone_docker_version }} + +jobs: + get-environment: + runs-on: ubuntu-24.04 + outputs: + latest_major_version: ${{ steps.latest_major_version.outputs.latest_major_version }} + is_cloud: ${{ steps.detect_cloud_version.outputs.result }} + major_version: ${{ steps.get_version.outputs.major_version }} + minor_version: ${{ steps.get_version.outputs.minor_version }} + release: ${{ 
steps.get_release.outputs.release }} + stability: ${{ steps.get_stability.outputs.stability }} + target_stability: ${{ steps.get_stability.outputs.target_stability }} + release_type: ${{ steps.get_release_type.outputs.release_type }} + is_targeting_feature_branch: ${{ steps.get_stability.outputs.is_targeting_feature_branch }} + img_version: ${{ steps.get_docker_images_version.outputs.img_version }} + test_img_version: ${{ steps.get_docker_images_version.outputs.test_img_version }} + gorgone_docker_version: ${{ steps.get_docker_images_version.outputs.gorgone_docker_version }} + + steps: + - name: Checkout sources (current branch) + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 + + # get latest major version to detect cloud / on-prem versions + - name: Checkout sources (develop branch) + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 + with: + ref: develop + path: centreon-develop + sparse-checkout: .version + + - name: Store latest major version + id: latest_major_version + run: | + . centreon-develop/.version + echo "latest_major_version=$MAJOR" >> $GITHUB_OUTPUT + shell: bash + + - if: ${{ github.event_name == 'pull_request' }} + name: Get nested pull request path + id: pr_path + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const prPath = ['${{ github.head_ref }}', '${{ github.base_ref }}']; + + const result = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + per_page: 100, + state: 'open' + }); + + let found = true; + while (found) { + found = false; + result.data.forEach(({ head: { ref: headRef }, base: { ref: baseRef} }) => { + if (headRef === prPath[prPath.length - 1] && ! 
prPath.includes(baseRef)) { + found = true; + prPath.push(baseRef); + } + }); + } + + return prPath; + + - name: Get stability + id: get_stability + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const getStability = (branchName) => { + switch (true) { + case /(^develop$)|(^dev-\d{2}\.\d{2}\.x$)|(^prepare-release-cloud.*)/.test(branchName): + return 'unstable'; + case /(^release.+)|(^hotfix.+)/.test(branchName): + return 'testing'; + case /(^master$)|(^\d{2}\.\d{2}\.x$)/.test(branchName): + return 'stable'; + default: + return 'canary'; + } + }; + + core.setOutput('stability', getStability('${{ github.head_ref || github.ref_name }}')); + + let isTargetingFeatureBranch = false; + if ("${{ github.event_name }}" === "pull_request") { + let targetStability = 'canary'; + const prPath = ${{ steps.pr_path.outputs.result || '[]' }}; + prPath.shift(); // remove current branch + + if (prPath.length && getStability(prPath[0]) === 'canary') { + isTargetingFeatureBranch = true; + } + + prPath.every((branchName) => { + console.log(`checking stability of ${branchName}`) + targetStability = getStability(branchName); + + if (targetStability !== 'canary') { + return false; + } + + return true; + }); + + core.setOutput('target_stability', targetStability); + } + + core.setOutput('is_targeting_feature_branch', isTargetingFeatureBranch); + + - name: Get version from ${{ inputs.version_file }} + id: get_version + run: | + if [[ "${{ inputs.version_file }}" == */.version ]]; then + . .version + . ${{ inputs.version_file }} + VERSION="$MAJOR.$MINOR" + elif [[ "${{ inputs.version_file }}" == CMakeLists.txt ]]; then + MAJOR=$(awk '$1 ~ "COLLECT_MAJOR" {maj=substr($2, 1, length($2)-1)} $1 ~ "COLLECT_MINOR" {min=substr($2, 1, length($2)-1) ; print maj "." 
min}' CMakeLists.txt) + MINOR=$(awk '$1 ~ "COLLECT_PATCH" {print substr($2, 1, length($2) - 1)}' CMakeLists.txt) + VERSION="$MAJOR.$MINOR" + else + echo "Unable to parse version file ${{ inputs.version_file }}" + exit 1 + fi + + if grep -E '^[2-9][0-9]\.[0-9][0-9]\.[0-9]+' <<<"$VERSION" >/dev/null 2>&1 ; then + n=${VERSION//[!0-9]/ } + a=(${n//\./ }) + echo "major_version=${a[0]}.${a[1]}" >> $GITHUB_OUTPUT + MAJOR=${a[0]}.${a[1]} + echo "minor_version=${a[2]}" >> $GITHUB_OUTPUT + else + echo "Cannot parse version number from ${{ inputs.version_file }}" + exit 1 + fi + shell: bash + + - name: "Get release: 1 for testing / stable, . for others" + id: get_release + run: | + if [[ "${{ steps.get_stability.outputs.stability }}" == "testing" || "${{ steps.get_stability.outputs.stability }}" == "stable" ]]; then + RELEASE="1" + else + RELEASE="$(date +%s).$(echo ${{ github.sha }} | cut -c -7)" + fi + + echo "release=$RELEASE" >> $GITHUB_OUTPUT + shell: bash + + - name: "Get release type: hotfix, release or not defined if not a release" + id: get_release_type + run: | + RELEASE_TYPE=$(echo "${{ github.head_ref || github.ref_name }}" | cut -d '-' -f 1) + if [[ "$RELEASE_TYPE" == "hotfix" || "$RELEASE_TYPE" == "release" ]]; then + echo "release_type=$RELEASE_TYPE" >> $GITHUB_OUTPUT + fi + shell: bash + + - name: "Detect cloud version" + id: detect_cloud_version + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + if ("${{ github.event_name }}" === "pull_request") { + const prPath = ${{ steps.pr_path.outputs.result || '[]' }}; + const finalTargetBranch = prPath.pop(); + if (['develop', 'master'].includes(finalTargetBranch)) { + return true; + } else if (/\d{2}\.\d{2}\.x$/.test(finalTargetBranch)) { + return false; + } + } + + const developMajorVersion = "${{ steps.latest_major_version.outputs.latest_major_version }}"; + const currentMajorVersion = "${{ steps.get_version.outputs.major_version }}"; + + if 
(Number(currentMajorVersion) >= Number(developMajorVersion)) { + return true; + } + + return false; + + - name: Get docker images version + id: get_docker_images_version + run: | + IMG_VERSION=$( cat `ls .github/docker/Dockerfile.centreon-collect-* | grep -v test` vcpkg.json | md5sum | awk '{print substr($1, 0, 8)}') + echo "img_version=$IMG_VERSION" >> $GITHUB_OUTPUT + + TEST_IMG_VERSION=$(cat .github/docker/Dockerfile.centreon-collect-*-test .github/scripts/collect-prepare-test-robot.sh resources/*.sql | md5sum | cut -c1-8) + echo "test_img_version=$TEST_IMG_VERSION" >> $GITHUB_OUTPUT + + GORGONE_DOCKER_VERSION=$(cat .github/docker/Dockerfile.gorgone-testing-* | md5sum | cut -c1-8) + echo "gorgone_docker_version=$GORGONE_DOCKER_VERSION" >> $GITHUB_OUTPUT + + - name: Display info in job summary + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + env: + VERSION_FILE: ${{ inputs.version_file }} + with: + script: | + const outputTable = [ + [{data: 'Name', header: true}, {data: 'Value', header: true}], + ['latest_major_version', '${{ steps.latest_major_version.outputs.latest_major_version }}'], + ['is_cloud', '${{ steps.detect_cloud_version.outputs.result }}'], + ['major_version', '${{ steps.get_version.outputs.major_version }}'], + ['minor_version', '${{ steps.get_version.outputs.minor_version }}'], + ['release', '${{ steps.get_release.outputs.release }}'], + ['stability', '${{ steps.get_stability.outputs.stability }}'], + ['release_type', '${{ steps.get_release_type.outputs.release_type || 'not defined because this is not a release' }}'], + ['is_targeting_feature_branch', '${{ steps.get_stability.outputs.is_targeting_feature_branch }}'], + ['img_version', '${{ steps.get_docker_images_version.outputs.img_version }}'], + ['test_img_version', '${{ steps.get_docker_images_version.outputs.test_img_version }}'], + ['gorgone_docker_version', '${{ steps.get_docker_images_version.outputs.gorgone_docker_version }}'], + ]; + + 
outputTable.push(['target_stability', '${{ steps.get_stability.outputs.target_stability || 'not defined because current run is not triggered by pull request event' }}']); + + core.summary + .addHeading(`${context.workflow} environment outputs`) + .addTable(outputTable); + + if ("${{ github.event_name }}" === "pull_request") { + const prPath = ${{ steps.pr_path.outputs.result || '[]' }}; + const mainBranchName = prPath.pop(); + let codeBlock = ` + %%{ init: { 'gitGraph': { 'mainBranchName': '${mainBranchName}', 'showCommitLabel': false } } }%% + gitGraph + commit`; + prPath.reverse().forEach((branchName) => { + codeBlock = `${codeBlock} + branch ${branchName} + checkout ${branchName} + commit`; + }); + + core.summary + .addHeading('Git workflow') + .addCodeBlock( + codeBlock, + "mermaid" + ); + } + + core.summary.write(); diff --git a/.github/workflows/get-version.yml b/.github/workflows/get-version.yml deleted file mode 100644 index 1d82b1deb8e..00000000000 --- a/.github/workflows/get-version.yml +++ /dev/null @@ -1,209 +0,0 @@ -on: - workflow_call: - inputs: - version_file: - required: false - type: string - default: CMakeLists.txt - outputs: - major_version: - description: "major version" - value: ${{ jobs.get-version.outputs.major_version }} - minor_version: - description: "minor version" - value: ${{ jobs.get-version.outputs.minor_version }} - img_version: - description: "docker image version (conan checksum)" - value: ${{ jobs.get-version.outputs.img_version }} - test_img_version: - description: "test docker image version (checksum of database sql, script and dockerfiles)" - value: ${{ jobs.get-version.outputs.test_img_version }} - version: - description: "major version" - value: ${{ jobs.get-version.outputs.version }} - release: - description: "release" - value: ${{ jobs.get-version.outputs.release }} - stability: - description: "branch stability (stable, testing, unstable, canary)" - value: ${{ jobs.get-version.outputs.stability }} - environment: - 
description: "branch stability (stable, testing, unstable, canary)" - value: ${{ jobs.get-version.outputs.environment }} - release_type: - description: "type of release (hotfix, release)" - value: ${{ jobs.get-version.outputs.release_type }} - release_cloud: - description: "context of release (cloud or not cloud)" - value: ${{ jobs.get-version.outputs.release_cloud }} - -jobs: - get-version: - runs-on: ubuntu-24.04 - outputs: - major_version: ${{ steps.get_version.outputs.major_version }} - minor_version: ${{ steps.get_version.outputs.minor_version }} - img_version: ${{ steps.get_version.outputs.img_version }} - test_img_version: ${{ steps.get_version.outputs.test_img_version }} - version: ${{ steps.get_version.outputs.version }} - release: ${{ steps.get_version.outputs.release }} - stability: ${{ steps.get_version.outputs.stability }} - environment: ${{ steps.get_version.outputs.env }} - release_type: ${{ steps.get_version.outputs.release_type }} - release_cloud: ${{ steps.get_version.outputs.release_cloud}} - - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - - name: install gh cli on self-hosted runner - run: | - if ! command -v gh &> /dev/null; then - echo "Installing GH CLI." - type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y) - curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg - sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null - sudo apt update - sudo apt install gh -y - else - echo "GH CLI is already installed." - fi - shell: bash - - - id: get_version - run: | - set -x - - if [[ "${{ inputs.version_file }}" == */.version ]]; then - . .version - . 
${{ inputs.version_file }} - VERSION="$MAJOR.$MINOR" - elif [[ "${{ inputs.version_file }}" == CMakeLists.txt ]]; then - MAJOR=$(awk '$1 ~ "COLLECT_MAJOR" {maj=substr($2, 1, length($2)-1)} $1 ~ "COLLECT_MINOR" {min=substr($2, 1, length($2)-1) ; print maj "." min}' CMakeLists.txt) - MINOR=$(awk '$1 ~ "COLLECT_PATCH" {print substr($2, 1, length($2) - 1)}' CMakeLists.txt) - VERSION="$MAJOR.$MINOR" - else - echo "Unable to parse ${{ inputs.version_file }}" - exit 1 - fi - - echo "VERSION=$VERSION" - - if egrep '^[2-9][0-9]\.[0-9][0-9]\.[0-9]+' <<<"$VERSION" >/dev/null 2>&1 ; then - n=${VERSION//[!0-9]/ } - a=(${n//\./ }) - echo "major_version=${a[0]}.${a[1]}" >> $GITHUB_OUTPUT - MAJOR=${a[0]}.${a[1]} - echo "minor_version=${a[2]}" >> $GITHUB_OUTPUT - else - echo "Cannot parse version number from ${{ inputs.version_file }}" - exit 1 - fi - - IMG_VERSION=$( cat `ls .github/docker/Dockerfile.centreon-collect-* | grep -v test` conanfile.txt | md5sum | awk '{print substr($1, 0, 8)}') - TEST_IMG_VERSION=$(cat .github/docker/Dockerfile.centreon-collect-*-test .github/scripts/collect-prepare-test-robot.sh resources/*.sql | md5sum | cut -c1-8) - echo "img_version=$IMG_VERSION" >> $GITHUB_OUTPUT - echo "test_img_version=$TEST_IMG_VERSION" >> $GITHUB_OUTPUT - echo "version=$VERSION" >> $GITHUB_OUTPUT - - if [[ -z "$GITHUB_HEAD_REF" ]]; then - BRANCHNAME="$GITHUB_REF_NAME" - else - BRANCHNAME="$GITHUB_HEAD_REF" - fi - - echo "BRANCHNAME is: $BRANCHNAME" - - # Set default release values - GITHUB_RELEASE_CLOUD=0 - GITHUB_RELEASE_TYPE=$(echo $BRANCHNAME |cut -d '-' -f 1) - - case "$BRANCHNAME" in - master) - echo "release=1" >> $GITHUB_OUTPUT - echo "release_cloud=1" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - ;; - [2-9][0-9].[0-9][0-9].x) - echo "release=1" >> $GITHUB_OUTPUT - echo "release_cloud=$GITHUB_RELEASE_CLOUD" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - ;; - develop) - echo "release=`date +%s`.`echo 
${{ github.sha }} | cut -c -7`" >> $GITHUB_OUTPUT - echo "release_cloud=1" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - ;; - dev-[2-9][0-9].[0-9][0-9].x) - echo "release=`date +%s`.`echo ${{ github.sha }} | cut -c -7`" >> $GITHUB_OUTPUT - echo "release_cloud=0" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - ;; - release* | hotfix*) - # Handle workflow_dispatch run triggers and run a dispatch ONLY for cloud release - GITHUB_RELEASE_BRANCH_BASE_REF_NAME="$(gh pr view $BRANCHNAME -q .baseRefName --json headRefName,baseRefName,state)" - echo "GITHUB_RELEASE_BRANCH_BASE_REF_NAME is: $GITHUB_RELEASE_BRANCH_BASE_REF_NAME" - GITHUB_RELEASE_BRANCH_PR_STATE="$(gh pr view $BRANCHNAME -q .state --json headRefName,baseRefName,state)" - echo "GITHUB_RELEASE_BRANCH_PR_STATE is: $GITHUB_RELEASE_BRANCH_PR_STATE" - - # Check if the release context (cloud and hotfix or cloud and release) - if [[ "$GITHUB_RELEASE_BRANCH_BASE_REF_NAME" == "master" ]] && [[ "$GITHUB_RELEASE_BRANCH_PR_STATE" == "OPEN" ]]; then - # Get release pull request ID - GITHUB_RELEASE_BRANCH_PR_NUMBER="$(gh pr view $BRANCHNAME -q .[] --json number)" - # Set release cloud to 1 (0=not-cloud, 1=cloud) - GITHUB_RELEASE_CLOUD=1 - # Debug - echo "GITHUB_RELEASE_TYPE is: $GITHUB_RELEASE_TYPE" - echo "GITHUB_RELEASE_BRANCH_PR_NUMBER is: $GITHUB_RELEASE_BRANCH_PR_NUMBER" # We do leave this here as debug help. 
- echo "GITHUB_RELEASE_CLOUD is: $GITHUB_RELEASE_CLOUD" - # Github ouputs - echo "release=`date +%s`.`echo ${{ github.sha }} | cut -c -7`" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - echo "release_cloud=$GITHUB_RELEASE_CLOUD" >> $GITHUB_OUTPUT - else - echo "release=1" >> $GITHUB_OUTPUT - echo "release_cloud=$GITHUB_RELEASE_CLOUD" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - fi - ;; - prepare-release-cloud*) - # Set release cloud to 1 (0=not-cloud, 1=cloud) - GITHUB_RELEASE_CLOUD=1 - # Debug - echo "GITHUB_RELEASE_TYPE is: $GITHUB_RELEASE_TYPE" - echo "GITHUB_RELEASE_CLOUD is: $GITHUB_RELEASE_CLOUD" - # Github ouputs - echo "release=`date +%s`.`echo ${{ github.sha }} | cut -c -7`" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - echo "release_cloud=$GITHUB_RELEASE_CLOUD" >> $GITHUB_OUTPUT - ;; - *) - echo "release=`date +%s`.`echo ${{ github.sha }} | cut -c -7`" >> $GITHUB_OUTPUT - echo "release_cloud=$GITHUB_RELEASE_CLOUD" >> $GITHUB_OUTPUT - echo "release_type=$GITHUB_RELEASE_TYPE" >> $GITHUB_OUTPUT - ;; - esac - - case "$BRANCHNAME" in - develop | dev-[2-9][0-9].[0-9][0-9].x) - STABILITY="unstable" - ENV="development" - ;; - release* | hotfix*) - STABILITY="testing" - ENV="testing" - ;; - master | [2-9][0-9].[0-9][0-9].x) - STABILITY="stable" - ENV="production" - ;; - *) - STABILITY="canary" - ;; - esac - echo "stability=$STABILITY" >> $GITHUB_OUTPUT - echo "env=$VERSION-$ENV" >> $GITHUB_OUTPUT - echo "GH_ENV: $VERSION-$ENV" - shell: bash - env: - GH_TOKEN: ${{ github.token }} diff --git a/.github/workflows/gorgone.yml b/.github/workflows/gorgone.yml index ab1e88ac8fd..1f3874fcac8 100644 --- a/.github/workflows/gorgone.yml +++ b/.github/workflows/gorgone.yml @@ -33,30 +33,31 @@ env: base_directory: gorgone jobs: - get-version: - uses: ./.github/workflows/get-version.yml + get-environment: + uses: ./.github/workflows/get-environment.yml with: version_file: 
gorgone/.version veracode-analysis: - needs: [get-version] + needs: [get-environment] + if: ${{ needs.get-environment.outputs.is_targeting_feature_branch != 'true' && github.event.pull_request.draft != 'true' }} uses: ./.github/workflows/veracode-analysis.yml with: module_directory: gorgone module_name: centreon-gorgone - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ needs.get-version.outputs.minor_version }} - img_version: ${{ needs.get-version.outputs.img_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} + img_version: ${{ needs.get-environment.outputs.img_version }} secrets: veracode_api_id: ${{ secrets.VERACODE_API_ID_GORG }} veracode_api_key: ${{ secrets.VERACODE_API_KEY_GORG }} veracode_srcclr_token: ${{ secrets.VERACODE_SRCCLR_TOKEN }} - docker_registry_id: ${{ secrets.DOCKER_REGISTRY_ID }} - docker_registry_passwd: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + docker_registry_id: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + docker_registry_passwd: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} package: - needs: [get-version] - if: ${{ needs.get-version.outputs.stability != 'stable' }} + needs: [get-environment] + if: ${{ needs.get-environment.outputs.stability != 'stable' }} strategy: fail-fast: false @@ -80,19 +81,21 @@ jobs: package_extension: ${{ matrix.package_extension }} image_name: ${{ matrix.image }} module_name: gorgone - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ needs.get-version.outputs.minor_version }} - release: ${{ needs.get-version.outputs.release }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} + release: ${{ needs.get-environment.outputs.release }} commit_hash: ${{ github.sha }} cache_key: ${{ github.sha }}-${{ github.run_id }}-${{ matrix.package_extension }}-${{ matrix.distrib }} secrets: 
- registry_username: ${{ secrets.DOCKER_REGISTRY_ID }} - registry_password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + registry_username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + registry_password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} + registry_private_username: ${{ secrets.HARBOR_RPM_GPG_SIGNING_REPO_USERNAME }} + registry_private_token: ${{ secrets.HARBOR_RPM_GPG_SIGNING_REPO_TOKEN }} deliver-sources: runs-on: [self-hosted, common] - needs: [get-version, package] - if: ${{ contains(fromJson('["stable"]'), needs.get-version.outputs.stability) && github.event_name != 'workflow_dispatch' }} + needs: [get-environment, package] + if: ${{ contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) && github.event_name != 'workflow_dispatch' }} steps: - name: Checkout sources @@ -104,14 +107,14 @@ jobs: bucket_directory: centreon-gorgone module_directory: gorgone module_name: centreon-gorgone - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ needs.get-version.outputs.minor_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} token_download_centreon_com: ${{ secrets.TOKEN_DOWNLOAD_CENTREON_COM }} deliver-rpm: runs-on: [self-hosted, common] - needs: [get-version, package] - if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-version.outputs.stability) }} + needs: [get-environment, package] + if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-environment.outputs.stability) }} strategy: matrix: @@ -126,17 +129,17 @@ jobs: with: module_name: gorgone distrib: ${{ matrix.distrib }} - version: ${{ needs.get-version.outputs.major_version }} + version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.sha }}-${{ github.run_id }}-rpm-${{ matrix.distrib }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: 
${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} deliver-deb: runs-on: [self-hosted, common] - needs: [get-version, package] - if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-version.outputs.stability) }} + needs: [get-environment, package] + if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-environment.outputs.stability) }} strategy: matrix: @@ -151,16 +154,16 @@ jobs: with: module_name: gorgone distrib: ${{ matrix.distrib }} - version: ${{ needs.get-version.outputs.major_version }} + version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.sha }}-${{ github.run_id }}-deb-${{ matrix.distrib }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} promote: - needs: [get-version] - if: ${{ contains(fromJson('["stable"]'), needs.get-version.outputs.stability) && github.event_name != 'workflow_dispatch' }} + needs: [get-environment] + if: ${{ contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) && github.event_name != 'workflow_dispatch' }} runs-on: [self-hosted, common] strategy: matrix: @@ -176,8 +179,8 @@ jobs: artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} module_name: gorgone distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} - stability: ${{ needs.get-version.outputs.stability }} + major_version: ${{ 
needs.get-environment.outputs.major_version }} + stability: ${{ needs.get-environment.outputs.stability }} github_ref_name: ${{ github.ref_name }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} diff --git a/.github/workflows/libzmq.yml b/.github/workflows/libzmq.yml index a652c798ab0..464f4ddd77b 100644 --- a/.github/workflows/libzmq.yml +++ b/.github/workflows/libzmq.yml @@ -19,11 +19,11 @@ on: - '.github/workflows/libzmq.yml' jobs: - get-version: - uses: ./.github/workflows/get-version.yml + get-environment: + uses: ./.github/workflows/get-environment.yml package-rpm: - needs: [get-version] + needs: [get-environment] strategy: fail-fast: false @@ -36,13 +36,13 @@ jobs: distrib: el9 arch: amd64 - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 container: - image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ needs.get-version.outputs.major_version }} + image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ needs.get-environment.outputs.major_version }} credentials: - username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} name: package ${{ matrix.distrib }} @@ -77,7 +77,7 @@ jobs: key: ${{ github.run_id }}-${{ github.sha }}-rpm-libzmq-${{ matrix.distrib }}-${{ matrix.arch }} package-deb: - needs: [get-version] + needs: [get-environment] strategy: fail-fast: false @@ -95,10 +95,10 @@ jobs: runs-on: ${{ matrix.runner }} container: - image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ needs.get-version.outputs.major_version }} + image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ needs.get-environment.outputs.major_version }} credentials: - 
username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} name: package ${{ matrix.distrib }} ${{ matrix.arch }} @@ -133,9 +133,8 @@ jobs: key: ${{ github.run_id }}-${{ github.sha }}-deb-libzmq-${{ matrix.distrib }}-${{ matrix.arch }} deliver-rpm: - if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-version.outputs.stability) }} - needs: [get-version, package-rpm] - environment: ${{ needs.get-version.outputs.environment }} + if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-environment.outputs.stability) }} + needs: [get-environment, package-rpm] runs-on: [self-hosted, common] strategy: matrix: @@ -156,17 +155,16 @@ jobs: with: module_name: libzmq distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.run_id }}-${{ github.sha }}-rpm-libzmq-${{ matrix.distrib }}-${{ matrix.arch }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} deliver-deb: - if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-version.outputs.stability) }} - needs: [get-version, package-deb] - environment: ${{ needs.get-version.outputs.environment }} + if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-environment.outputs.stability) }} + needs: [get-environment, package-deb] runs-on: [self-hosted, common] strategy: matrix: @@ -187,16 +185,16 @@ jobs: with: module_name: libzmq distrib: ${{ matrix.distrib }} 
- major_version: ${{ needs.get-version.outputs.major_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.run_id }}-${{ github.sha }}-deb-libzmq-${{ matrix.distrib }}-${{ matrix.arch }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} promote: - needs: [get-version] - if: ${{ contains(fromJson('["stable"]'), needs.get-version.outputs.stability) && github.event_name != 'workflow_dispatch' }} + needs: [get-environment] + if: ${{ contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) && github.event_name != 'workflow_dispatch' }} runs-on: [self-hosted, common] strategy: matrix: @@ -212,8 +210,8 @@ jobs: artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} module_name: libzmq distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} - stability: ${{ needs.get-version.outputs.stability }} + major_version: ${{ needs.get-environment.outputs.major_version }} + stability: ${{ needs.get-environment.outputs.stability }} github_ref_name: ${{ github.ref_name }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} diff --git a/.github/workflows/lua-curl.yml b/.github/workflows/lua-curl.yml index ead06e71c3b..e88c068c22e 100644 --- a/.github/workflows/lua-curl.yml +++ b/.github/workflows/lua-curl.yml @@ -24,12 +24,12 @@ env: release: 10 # 10 for openssl 1.1.1 / 20 for openssl system jobs: - get-version: - 
uses: ./.github/workflows/get-version.yml + get-environment: + uses: ./.github/workflows/get-environment.yml package: - needs: [get-version] - if: ${{ needs.get-version.outputs.stability != 'stable' }} + needs: [get-environment] + if: ${{ needs.get-environment.outputs.stability != 'stable' }} strategy: fail-fast: false @@ -63,10 +63,10 @@ jobs: runs-on: ${{ matrix.runner }} container: - image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ needs.get-version.outputs.img_version }} + image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ needs.get-environment.outputs.img_version }} credentials: - username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} name: package ${{ matrix.distrib }} ${{ matrix.arch }} @@ -155,12 +155,12 @@ jobs: rpm_gpg_key: ${{ secrets.RPM_GPG_SIGNING_KEY }} rpm_gpg_signing_key_id: ${{ secrets.RPM_GPG_SIGNING_KEY_ID }} rpm_gpg_signing_passphrase: ${{ secrets.RPM_GPG_SIGNING_PASSPHRASE }} - stability: ${{ needs.get-version.outputs.stability }} + stability: ${{ needs.get-environment.outputs.stability }} deliver-rpm: - if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-version.outputs.stability) }} - needs: [get-version, package] - runs-on: ubuntu-22.04 + if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-environment.outputs.stability) }} + needs: [get-environment, package] + runs-on: ubuntu-24.04 strategy: matrix: include: @@ -179,17 +179,17 @@ jobs: with: module_name: lua-curl distrib: ${{ matrix.distrib }} - version: ${{ needs.get-version.outputs.major_version }} + version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.sha }}-${{ github.run_id }}-rpm-lua-curl-${{ matrix.distrib }}-${{ matrix.arch }} - stability: ${{ needs.get-version.outputs.stability }} 
- release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} deliver-deb: - if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-version.outputs.stability) }} - needs: [get-version, package] - runs-on: ubuntu-22.04 + if: ${{ contains(fromJson('["testing", "unstable"]'), needs.get-environment.outputs.stability) }} + needs: [get-environment, package] + runs-on: ubuntu-24.04 strategy: matrix: include: @@ -208,16 +208,16 @@ jobs: with: module_name: lua-curl distrib: ${{ matrix.distrib }} - version: ${{ needs.get-version.outputs.major_version }} + version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.sha }}-${{ github.run_id }}-deb-lua-curl-${{ matrix.distrib }}-${{ matrix.arch }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} promote: - needs: [get-version] - if: ${{ contains(fromJson('["stable"]'), needs.get-version.outputs.stability) && github.event_name != 'workflow_dispatch' }} + needs: [get-environment] + if: ${{ contains(fromJson('["stable"]'), needs.get-environment.outputs.stability) && github.event_name != 'workflow_dispatch' }} runs-on: [self-hosted, common] strategy: matrix: @@ -233,8 +233,8 @@ jobs: artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} module_name: lua-curl distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} - stability: ${{ 
needs.get-version.outputs.stability }} + major_version: ${{ needs.get-environment.outputs.major_version }} + stability: ${{ needs.get-environment.outputs.stability }} github_ref_name: ${{ github.ref_name }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} diff --git a/.github/workflows/package-collect.yml b/.github/workflows/package-collect.yml index 983bc118483..112e953a22e 100644 --- a/.github/workflows/package-collect.yml +++ b/.github/workflows/package-collect.yml @@ -54,8 +54,8 @@ jobs: container: image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/${{ matrix.image }}:${{ inputs.img_version }} credentials: - username: ${{ secrets.DOCKER_REGISTRY_ID }} - password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} name: package ${{ matrix.distrib }} ${{ matrix.arch }} @@ -145,7 +145,7 @@ jobs: run: rm -rf *-debuginfo*.${{ matrix.package_extension }} # set condition to true if artifacts are needed - - if: ${{ false }} + - if: ${{ true }} name: Upload package artifacts uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml index e01f3ff5683..cd1bda7047f 100644 --- a/.github/workflows/package.yml +++ b/.github/workflows/package.yml @@ -79,6 +79,10 @@ on: required: true registry_password: required: true + registry_private_username: + required: true + registry_private_token: + required: true jobs: package: @@ -127,11 +131,11 @@ jobs: needs: [package] runs-on: ubuntu-22.04 container: - image: ${{ vars.DOCKER_INTERNAL_REGISTRY_URL }}/rpm-signing:ubuntu + image: docker.centreon.com/centreon-private/rpm-signing:latest options: -t credentials: - username: ${{ 
secrets.registry_username }} - password: ${{ secrets.registry_password }} + username: ${{ secrets.registry_private_username }} + password: ${{ secrets.registry_private_token }} steps: - run: | diff --git a/.github/workflows/rebase-master.yml b/.github/workflows/rebase-master.yml index c2241297a0e..e9336085c6c 100644 --- a/.github/workflows/rebase-master.yml +++ b/.github/workflows/rebase-master.yml @@ -12,7 +12,7 @@ on: jobs: main: name: Sync Stable Branches - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 if: github.event.pull_request.merged == true steps: - name: git checkout diff --git a/.github/workflows/rebase-version.yml b/.github/workflows/rebase-version.yml index 4be9a45361f..8c3f36d26cc 100644 --- a/.github/workflows/rebase-version.yml +++ b/.github/workflows/rebase-version.yml @@ -12,7 +12,7 @@ on: jobs: main: name: Sync Stable Branches - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 if: github.event.pull_request.merged == true steps: - name: git checkout diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e20fa63d9d6..aa40dc0d7e4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ on: jobs: release: if: ${{ github.event.pull_request.merged == true }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check base_ref run: | diff --git a/.github/workflows/robot-nightly.yml b/.github/workflows/robot-nightly.yml index 125e862f0b8..19cedc7d425 100644 --- a/.github/workflows/robot-nightly.yml +++ b/.github/workflows/robot-nightly.yml @@ -27,38 +27,38 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - get-version: - uses: ./.github/workflows/get-version.yml + get-environment: + uses: ./.github/workflows/get-environment.yml veracode-analysis: - needs: [get-version] + needs: [get-environment] uses: ./.github/workflows/veracode-analysis.yml with: module_name: centreon-collect - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ 
needs.get-version.outputs.minor_version }} - img_version: ${{ needs.get-version.outputs.img_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} + img_version: ${{ needs.get-environment.outputs.img_version }} secrets: veracode_api_id: ${{ secrets.VERACODE_API_ID_COLL }} veracode_api_key: ${{ secrets.VERACODE_API_KEY_COLL }} veracode_srcclr_token: ${{ secrets.VERACODE_SRCCLR_TOKEN }} - docker_registry_id: ${{ secrets.DOCKER_REGISTRY_ID }} - docker_registry_passwd: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + docker_registry_id: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + docker_registry_passwd: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} package: - needs: [get-version] + needs: [get-environment] uses: ./.github/workflows/package-collect.yml with: - major_version: ${{ needs.get-version.outputs.major_version }} - minor_version: ${{ needs.get-version.outputs.minor_version }} - img_version: ${{ needs.get-version.outputs.img_version }} - release: ${{ needs.get-version.outputs.release }} + major_version: ${{ needs.get-environment.outputs.major_version }} + minor_version: ${{ needs.get-environment.outputs.minor_version }} + img_version: ${{ needs.get-environment.outputs.img_version }} + release: ${{ needs.get-environment.outputs.release }} commit_hash: ${{ github.sha }} - stability: ${{ needs.get-version.outputs.stability }} + stability: ${{ needs.get-environment.outputs.stability }} secrets: inherit robot-test: - needs: [get-version, package] + needs: [get-environment, package] strategy: fail-fast: false @@ -90,21 +90,21 @@ jobs: distrib: ${{ matrix.distrib }} arch: ${{ matrix.arch }} image: ${{ matrix.image }} - image_test: ${{ matrix.image }}:${{ needs.get-version.outputs.test_img_version }} - image_version: ${{ needs.get-version.outputs.img_version }} + image_test: ${{ matrix.image }}:${{ needs.get-environment.outputs.test_img_version }} + image_version: ${{ 
needs.get-environment.outputs.img_version }} package_cache_key: ${{ github.run_id }}-${{ github.sha }}-${{ matrix.package_extension}}-centreon-collect-${{ matrix.distrib }}-${{ matrix.arch }}-${{ github.head_ref || github.ref_name }} package_cache_path: ./*.${{ matrix.package_extension}} database_type: ${{ matrix.database_type }} test_group_name: ${{matrix.test_group_name}} secrets: - registry_username: ${{ secrets.DOCKER_REGISTRY_ID }} - registry_password: ${{ secrets.DOCKER_REGISTRY_PASSWD }} + registry_username: ${{ secrets.HARBOR_CENTREON_PULL_USERNAME }} + registry_password: ${{ secrets.HARBOR_CENTREON_PULL_TOKEN }} collect_s3_access_key: ${{ secrets.COLLECT_S3_ACCESS_KEY }} collect_s3_secret_key: ${{ secrets.COLLECT_S3_SECRET_KEY }} deliver-rpm: - if: ${{ contains(fromJson('["unstable"]'), needs.get-version.outputs.stability) }} - needs: [robot-test, get-version] + if: ${{ contains(fromJson('["unstable"]'), needs.get-environment.outputs.stability) }} + needs: [robot-test, get-environment] runs-on: [self-hosted, common] strategy: matrix: @@ -120,16 +120,16 @@ jobs: with: module_name: collect distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.run_id }}-${{ github.sha }}-rpm-centreon-collect-${{ matrix.distrib }}-amd64-${{ github.head_ref || github.ref_name }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} deliver-deb: - if: ${{ contains(fromJson('["unstable"]'), needs.get-version.outputs.stability) }} - needs: [robot-test, get-version] + if: ${{ 
contains(fromJson('["unstable"]'), needs.get-environment.outputs.stability) }} + needs: [robot-test, get-environment] runs-on: [self-hosted, common] strategy: matrix: @@ -149,9 +149,9 @@ jobs: with: module_name: collect distrib: ${{ matrix.distrib }} - major_version: ${{ needs.get-version.outputs.major_version }} + major_version: ${{ needs.get-environment.outputs.major_version }} artifactory_token: ${{ secrets.ARTIFACTORY_ACCESS_TOKEN }} cache_key: ${{ github.run_id }}-${{ github.sha }}-deb-centreon-collect-${{ matrix.distrib }}-${{ matrix.arch }}-${{ github.head_ref || github.ref_name }} - stability: ${{ needs.get-version.outputs.stability }} - release_type: ${{ needs.get-version.outputs.release_type }} - release_cloud: ${{ needs.get-version.outputs.release_cloud }} + stability: ${{ needs.get-environment.outputs.stability }} + release_type: ${{ needs.get-environment.outputs.release_type }} + is_cloud: ${{ needs.get-environment.outputs.is_cloud }} diff --git a/.github/workflows/robot-test.yml b/.github/workflows/robot-test.yml index c77c75bee63..3d6c8d7470c 100644 --- a/.github/workflows/robot-test.yml +++ b/.github/workflows/robot-test.yml @@ -41,7 +41,7 @@ on: jobs: test-image-to-cache: - runs-on: ${{ contains(inputs.image, 'arm') && fromJson('["self-hosted", "collect-arm64"]') || 'ubuntu-22.04' }} + runs-on: ${{ contains(inputs.image, 'arm') && fromJson('["self-hosted", "collect-arm64"]') || 'ubuntu-24.04' }} steps: - name: Checkout sources uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 @@ -78,7 +78,7 @@ jobs: robot-test-list: needs: [test-image-to-cache] - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 outputs: features: ${{ steps.list-features.outputs.features }} @@ -94,7 +94,7 @@ jobs: robot-test: needs: [robot-test-list] - runs-on: ${{ contains(inputs.image, 'arm') && fromJson('["self-hosted", "collect-arm64"]') || 'ubuntu-22.04' }} + runs-on: ${{ contains(inputs.image, 'arm') && fromJson('["self-hosted", "collect-arm64"]') || 
'ubuntu-24.04' }} strategy: fail-fast: false @@ -174,7 +174,7 @@ jobs: robot-test-report: needs: [robot-test] if: ${{ failure() }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 diff --git a/.github/workflows/veracode-analysis.yml b/.github/workflows/veracode-analysis.yml index 96a8c2e4053..d90359e4b14 100644 --- a/.github/workflows/veracode-analysis.yml +++ b/.github/workflows/veracode-analysis.yml @@ -32,7 +32,7 @@ on: jobs: routing: name: Check before analysis - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 outputs: development_stage: ${{ steps.routing-mode.outputs.development_stage }} skip_analysis: ${{ steps.routing-mode.outputs.skip_analysis }} @@ -166,7 +166,7 @@ jobs: name: Sandbox scan needs: [routing, build] if: needs.routing.outputs.development_stage != 'Development' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Promote latest scan diff --git a/.version b/.version index 4a6bd95255d..c0add3b47f7 100644 --- a/.version +++ b/.version @@ -1,2 +1,2 @@ MAJOR=23.10 -MINOR=11 +MINOR=12 diff --git a/CMakeLists.txt b/CMakeLists.txt index d0ef8133101..b5c314357a1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -122,7 +122,7 @@ endif() # Version. set(COLLECT_MAJOR 23) set(COLLECT_MINOR 10) -set(COLLECT_PATCH 11) +set(COLLECT_PATCH 12) set(COLLECT_VERSION "${COLLECT_MAJOR}.${COLLECT_MINOR}.${COLLECT_PATCH}") add_definitions(-DCENTREON_CONNECTOR_VERSION=\"${COLLECT_VERSION}\") diff --git a/broker/CMakeLists.txt b/broker/CMakeLists.txt index 8b32553ef1b..5709120cc24 100644 --- a/broker/CMakeLists.txt +++ b/broker/CMakeLists.txt @@ -516,7 +516,7 @@ add_subdirectory(core/sql) # Generator module. 
add_broker_module(GENERATOR OFF) add_broker_module(STATS ON) -add_broker_module(STATS_EXPORTER ON) +#add_broker_module(STATS_EXPORTER ON) add_broker_module(NEB ON) add_broker_module(RRD ON) add_broker_module(UNIFIED_SQL ON) diff --git a/broker/core/sql/src/mysql_connection.cc b/broker/core/sql/src/mysql_connection.cc index 78aea399801..cbc4766ef27 100644 --- a/broker/core/sql/src/mysql_connection.cc +++ b/broker/core/sql/src/mysql_connection.cc @@ -17,6 +17,7 @@ */ #include +#include #include "com/centreon/broker/config/applier/init.hh" #include "com/centreon/broker/log_v2.hh" @@ -464,18 +465,26 @@ void mysql_connection::_statement(mysql_task* t) { "mysql_connection {:p}: execute statement {} attempt {}: {}", static_cast(this), task->statement_id, attempts, query); if (mysql_stmt_execute(stmt)) { - std::string err_msg( - fmt::format("{} errno={} {}", mysql_error::msg[task->error_code], - ::mysql_errno(_conn), ::mysql_stmt_error(stmt))); - SPDLOG_LOGGER_ERROR(log_v2::sql(), - "connection fail to execute statement {:p}: {}", - static_cast(this), err_msg); - if (_server_error(::mysql_stmt_errno(stmt))) { + int32_t err_code = ::mysql_stmt_errno(stmt); + std::string err_msg(fmt::format("{} errno={} {}", + mysql_error::msg[task->error_code], + err_code, ::mysql_stmt_error(stmt))); + if (err_code == 0) { + SPDLOG_LOGGER_ERROR(log_v2::sql(), + "mysql_connection: errno=0, so we simulate a " + "server error CR_SERVER_LOST"); + err_code = CR_SERVER_LOST; + } else { + SPDLOG_LOGGER_ERROR(log_v2::sql(), + "connection fail to execute statement {:p}: {}", + static_cast(this), err_msg); + } + if (_server_error(err_code)) { set_error_message(err_msg); break; } - if (mysql_stmt_errno(stmt) != 1213 && - mysql_stmt_errno(stmt) != 1205) // Dead Lock error + if (err_code != ER_LOCK_DEADLOCK && + err_code != ER_LOCK_WAIT_TIMEOUT) // Dead Lock error attempts = MAX_ATTEMPTS; if (mysql_commit(_conn)) { diff --git a/broker/core/sql/src/mysql_multi_insert.cc 
b/broker/core/sql/src/mysql_multi_insert.cc index cafc020e386..7d375cb82cd 100644 --- a/broker/core/sql/src/mysql_multi_insert.cc +++ b/broker/core/sql/src/mysql_multi_insert.cc @@ -132,7 +132,11 @@ void bulk_or_multi::execute(mysql& connexion, my_error::code ec, int thread_id) { if (_bulk_stmt) { - if (!_bulk_bind->empty()) { + /* If the database connection is lost, we can have this issue */ + if (!_bulk_bind) { + _bulk_bind = _bulk_stmt->create_bind(); + _bulk_bind->reserve(_bulk_row); + } else if (!_bulk_bind->empty()) { _bulk_stmt->set_bind(std::move(_bulk_bind)); connexion.run_statement(*_bulk_stmt, ec, thread_id); _bulk_bind = _bulk_stmt->create_bind(); diff --git a/broker/lua/inc/com/centreon/broker/lua/macro_cache.hh b/broker/lua/inc/com/centreon/broker/lua/macro_cache.hh index dbd9cd3822a..397ee4a2603 100644 --- a/broker/lua/inc/com/centreon/broker/lua/macro_cache.hh +++ b/broker/lua/inc/com/centreon/broker/lua/macro_cache.hh @@ -27,6 +27,7 @@ #include "com/centreon/broker/neb/host_group.hh" #include "com/centreon/broker/neb/host_group_member.hh" #include "com/centreon/broker/neb/instance.hh" +#include "com/centreon/broker/neb/internal.hh" #include "com/centreon/broker/neb/service.hh" #include "com/centreon/broker/neb/service_group.hh" #include "com/centreon/broker/neb/service_group_member.hh" @@ -43,7 +44,13 @@ class macro_cache { std::shared_ptr _cache; absl::flat_hash_map> _instances; absl::flat_hash_map> _hosts; - absl::flat_hash_map> _host_groups; + /* The host groups cache stores also a set with the pollers telling they need + * the cache. So if no more poller needs a host group, we can remove it from + * the cache. 
*/ + absl::flat_hash_map, + absl::flat_hash_set>> + _host_groups; absl::btree_map, std::shared_ptr> _host_group_members; @@ -51,7 +58,12 @@ class macro_cache { _custom_vars; absl::flat_hash_map, std::shared_ptr> _services; - absl::flat_hash_map> + /* The service groups cache stores also a set with the pollers telling they + * need the cache. So if no more poller needs a service group, we can remove + * it from the cache. */ + absl::flat_hash_map, + absl::flat_hash_set>> _service_groups; absl::btree_map, std::shared_ptr> diff --git a/broker/lua/src/broker_utils.cc b/broker/lua/src/broker_utils.cc index 9f48eca87b5..b4eb5b63437 100644 --- a/broker/lua/src/broker_utils.cc +++ b/broker/lua/src/broker_utils.cc @@ -24,7 +24,7 @@ #include "absl/strings/string_view.h" #include "com/centreon/broker/config/applier/state.hh" -#include +#include #include #include #include @@ -775,6 +775,30 @@ static int l_broker_stat(lua_State* L) { } } +static void md5_message(const unsigned char* message, + size_t message_len, + unsigned char** digest, + unsigned int* digest_len) { + EVP_MD_CTX* mdctx; + if ((mdctx = EVP_MD_CTX_new()) == nullptr) { + log_v2::lua()->error("lua: fail to call MD5 (EVP_MD_CTX_new call)"); + } + if (1 != EVP_DigestInit_ex(mdctx, EVP_md5(), nullptr)) { + log_v2::lua()->error("lua: fail to call MD5 (EVP_DigestInit_ex call)"); + } + if (1 != EVP_DigestUpdate(mdctx, message, message_len)) { + log_v2::lua()->error("lua: fail to call MD5 (EVP_DigestUpdate call)"); + } + if ((*digest = (unsigned char*)OPENSSL_malloc(EVP_MD_size(EVP_md5()))) == + nullptr) { + log_v2::lua()->error("lua: fail to call MD5 (OPENSSL_malloc call)"); + } + if (1 != EVP_DigestFinal_ex(mdctx, *digest, digest_len)) { + log_v2::lua()->error("lua: fail to call MD5 (EVP_DigestFinal_ex call)"); + } + EVP_MD_CTX_free(mdctx); +} + static int l_broker_md5(lua_State* L) { auto digit = [](unsigned char d) -> char { if (d < 10) @@ -785,11 +809,12 @@ static int l_broker_md5(lua_State* L) { size_t len; const 
unsigned char* str = reinterpret_cast(lua_tolstring(L, -1, &len)); - unsigned char md5[MD5_DIGEST_LENGTH]; - MD5(str, len, md5); - char result[2 * MD5_DIGEST_LENGTH + 1]; + unsigned char* md5; + uint32_t md5_len; + md5_message(str, len, &md5, &md5_len); + char result[2 * md5_len + 1]; char* tmp = result; - for (int i = 0; i < MD5_DIGEST_LENGTH; i++) { + for (uint32_t i = 0; i < md5_len; i++) { *tmp = digit(md5[i] >> 4); ++tmp; *tmp = digit(md5[i] & 0xf); @@ -797,6 +822,7 @@ static int l_broker_md5(lua_State* L) { } *tmp = 0; lua_pushstring(L, result); + OPENSSL_free(md5); return 1; } diff --git a/broker/lua/src/macro_cache.cc b/broker/lua/src/macro_cache.cc index 53a0bf9cad8..22a58ecb322 100644 --- a/broker/lua/src/macro_cache.cc +++ b/broker/lua/src/macro_cache.cc @@ -368,12 +368,15 @@ macro_cache::get_host_group_members() const { * * @return The name of the host group. */ -std::string const& macro_cache::get_host_group_name(uint64_t id) const { - auto const found = _host_groups.find(id); +const std::string& macro_cache::get_host_group_name(uint64_t id) const { + const auto found = _host_groups.find(id); - if (found == _host_groups.end()) + if (found == _host_groups.end()) { + SPDLOG_LOGGER_ERROR(log_v2::lua(), + "lua: could not find information on host group {}", id); throw msg_fmt("lua: could not find information on host group {}", id); - return found->second->name; + } + return found->second.first->name; } /** @@ -424,9 +427,13 @@ macro_cache::get_service_group_members() const { std::string const& macro_cache::get_service_group_name(uint64_t id) const { auto found = _service_groups.find(id); - if (found == _service_groups.end()) + if (found == _service_groups.end()) { + SPDLOG_LOGGER_ERROR(log_v2::lua(), + "lua: could not find information on service group {}", + id); throw msg_fmt("lua: could not find information on service group {}", id); - return found->second->name; + } + return found->second.first->name; } /** @@ -809,14 +816,36 @@ void 
macro_cache::_process_pb_adaptive_host( * @param data The event. */ void macro_cache::_process_host_group(std::shared_ptr const& data) { - std::shared_ptr const& hg = + const std::shared_ptr& hg = std::static_pointer_cast(data); SPDLOG_LOGGER_DEBUG(log_v2::lua(), "lua: processing host group '{}' of id {} enabled: {}", hg->name, hg->id, hg->enabled); - if (hg->enabled) - _host_groups[hg->id] = hg; - // erasure is desactivated because a group cen be owned by several pollers + if (hg->enabled) { + auto found = _host_groups.find(hg->id); + if (found != _host_groups.end()) { + /* here, we complete the set of pollers */ + found->second.second.insert(hg->poller_id); + found->second.first->name = hg->name; + } else { + /* Here, we add the hostgroup and the first poller that needs it */ + absl::flat_hash_set pollers{hg->poller_id}; + _host_groups[hg->id] = std::make_pair(hg, pollers); + } + } else { + /* We check that no more pollers need this host group. So if the set is + * empty, we can also remove the host group. 
*/ + auto found = _host_groups.find(hg->id); + if (found != _host_groups.end()) { + auto f = found->second.second.find(hg->poller_id); + if (f != found->second.second.end()) { + found->second.second.erase(f); + if (found->second.second.empty()) { + _host_groups.erase(found); + } + } + } + } } /** @@ -1054,9 +1083,31 @@ void macro_cache::_process_service_group( SPDLOG_LOGGER_DEBUG(log_v2::lua(), "lua: processing service group '{}' of id {}", sg->name, sg->id); - if (sg->enabled) - _service_groups[sg->id] = sg; - // erasure is desactivated because a group cen be owned by several pollers + if (sg->enabled) { + auto found = _service_groups.find(sg->id); + if (found != _service_groups.end()) { + /* here, we complete the set of pollers */ + found->second.second.insert(sg->poller_id); + found->second.first->name = sg->name; + } else { + /* Here, we add the servicegroup and the first poller that needs it */ + absl::flat_hash_set pollers{sg->poller_id}; + _service_groups[sg->id] = std::make_pair(sg, pollers); + } + } else { + /* We check that no more pollers need this service group. So if the set is + * empty, we can also remove the service group. 
*/ + auto found = _service_groups.find(sg->id); + if (found != _service_groups.end()) { + auto f = found->second.second.find(sg->poller_id); + if (f != found->second.second.end()) { + found->second.second.erase(f); + if (found->second.second.empty()) { + _service_groups.erase(found); + } + } + } + } } /** @@ -1327,8 +1378,13 @@ void macro_cache::_save_to_disk() { for (auto it(_hosts.begin()), end(_hosts.end()); it != end; ++it) _cache->add(it->second); - for (auto it(_host_groups.begin()), end(_host_groups.end()); it != end; ++it) - _cache->add(it->second); + for (auto it = _host_groups.begin(), end = _host_groups.end(); it != end; + ++it) { + for (auto poller_id : it->second.second) { + it->second.first->id = poller_id; + _cache->add(it->second.first); + } + } for (auto it(_host_group_members.begin()), end(_host_group_members.end()); it != end; ++it) @@ -1337,9 +1393,13 @@ void macro_cache::_save_to_disk() { for (auto it(_services.begin()), end(_services.end()); it != end; ++it) _cache->add(it->second); - for (auto it(_service_groups.begin()), end(_service_groups.end()); it != end; - ++it) - _cache->add(it->second); + for (auto it = _service_groups.begin(), end = _service_groups.end(); + it != end; ++it) { + for (auto poller_id : it->second.second) { + it->second.first->id = poller_id; + _cache->add(it->second.first); + } + } for (auto it = _service_group_members.begin(), end = _service_group_members.end(); diff --git a/broker/lua/test/lua.cc b/broker/lua/test/lua.cc index 8e87a730498..4a95bc6dbd2 100644 --- a/broker/lua/test/lua.cc +++ b/broker/lua/test/lua.cc @@ -1437,6 +1437,7 @@ TEST_F(LuaTest, ServiceGroupCacheTestName) { auto sg{std::make_shared()}; sg->id = 28; sg->name = "centreon"; + sg->enabled = true; _cache->write(sg); CreateScript(filename, diff --git a/conanfile.txt b/conanfile.txt index b03f140fb83..d51cbd820e1 100644 --- a/conanfile.txt +++ b/conanfile.txt @@ -9,7 +9,6 @@ libssh2/1.10.0 mariadb-connector-c/3.3.3 nlohmann_json/3.11.2 openssl/1.1.1t 
-opentelemetry-cpp/1.14.2 protobuf/3.21.9 rapidjson/cci.20230929 rapidyaml/0.5.0 diff --git a/engine/enginerpc/CMakeLists.txt b/engine/enginerpc/CMakeLists.txt index 30a6254174d..f2c0e034c01 100644 --- a/engine/enginerpc/CMakeLists.txt +++ b/engine/enginerpc/CMakeLists.txt @@ -66,7 +66,7 @@ add_library( # Headers. "${INC_DIR}/engine_impl.hh" "${INC_DIR}/enginerpc.hh") -add_dependencies(${ENGINERPC} centreon_common) +add_dependencies(${ENGINERPC} centreon_common engine_rpc) target_precompile_headers(${ENGINERPC} PRIVATE precomp_inc/precomp.hh) diff --git a/packaging/centreon-broker-stats-exporter-debuginfo.yaml b/packaging/centreon-broker-stats-exporter-debuginfo.yaml deleted file mode 100644 index 90575a1659f..00000000000 --- a/packaging/centreon-broker-stats-exporter-debuginfo.yaml +++ /dev/null @@ -1,40 +0,0 @@ -name: "centreon-broker-stats-exporter-debuginfo" -arch: "${ARCH}" -platform: "linux" -version_schema: "none" -version: "${VERSION}" -release: "${RELEASE}${DIST}" -section: "default" -priority: "optional" -maintainer: "Centreon " -description: | - Debuginfo package for centreon-broker-stats-exporter. - Commit: @COMMIT_HASH@ -vendor: "Centreon" -homepage: "https://www.centreon.com" -license: "Apache-2.0" - -contents: - - src: "../lib/15-stats_exporter.so.debug" - dst: "/usr/lib/debug/usr/share/centreon/lib/centreon-broker/" - -overrides: - rpm: - depends: - - centreon-broker-stats-exporter = ${VERSION}-${RELEASE}${DIST} - deb: - depends: - - centreon-broker-stats-exporter (= ${VERSION}-${RELEASE}${DIST}) - conflicts: - - centreon-broker-stats-exporter-dbgsym - replaces: - - centreon-broker-stats-exporter-dbgsym - provides: - - centreon-broker-stats-exporter-dbgsym - -rpm: - summary: Debuginfo package for centreon-broker-stats-exporter. 
- compression: zstd - signature: - key_file: ${RPM_SIGNING_KEY_FILE} - key_id: ${RPM_SIGNING_KEY_ID} diff --git a/packaging/centreon-broker-stats-exporter.yaml b/packaging/centreon-broker-stats-exporter.yaml deleted file mode 100644 index 81565d91afd..00000000000 --- a/packaging/centreon-broker-stats-exporter.yaml +++ /dev/null @@ -1,36 +0,0 @@ -name: "centreon-broker-stats-exporter" -arch: "${ARCH}" -platform: "linux" -version_schema: "none" -version: "${VERSION}" -release: "${RELEASE}${DIST}" -section: "default" -priority: "optional" -maintainer: "Centreon " -description: | - This module of Centreon Broker allows you to send broker statistics to - an Opentelemetry Exporter. It can be configured to use gRPC or http - protocols. - Commit: @COMMIT_HASH@ -vendor: "Centreon" -homepage: "https://www.centreon.com" -license: "Apache-2.0" - -contents: - - src: "../lib/15-stats_exporter.so" - dst: "/usr/share/centreon/lib/centreon-broker/" - -overrides: - rpm: - depends: - - centreon-broker-core = ${VERSION}-${RELEASE}${DIST} - deb: - depends: - - centreon-broker-core (= ${VERSION}-${RELEASE}${DIST}) - -rpm: - summary: Export Centreon Statistics to an OpenTelemetry Exporter. - compression: zstd - signature: - key_file: ${RPM_SIGNING_KEY_FILE} - key_id: ${RPM_SIGNING_KEY_ID} diff --git a/tests/broker-engine/services-and-bulk-stmt.robot b/tests/broker-engine/services-and-bulk-stmt.robot index e3d2fc38675..2d03b5d74e5 100644 --- a/tests/broker-engine/services-and-bulk-stmt.robot +++ b/tests/broker-engine/services-and-bulk-stmt.robot @@ -63,6 +63,7 @@ EBBPS1 IF "${output}" == "((0,),)" BREAK END Should Be Equal As Strings ${output} ((0,),) + Disconnect From Database FOR ${i} IN RANGE ${1000} Ctn Process Service Check Result host_1 service_${i+1} 2 warning${i} @@ -100,6 +101,7 @@ EBBPS1 IF "${output}" == "((0,),)" BREAK END Should Be Equal As Strings ${output} ((0,),) + Disconnect From Database EBBPS2 [Documentation] 1000 service check results are sent to the poller. 
The test is done with the unified_sql stream, no service status is lost, we find the 1000 results in the database: table services. @@ -146,6 +148,7 @@ EBBPS2 IF "${output}" == "((0,),)" BREAK END Should Be Equal As Strings ${output} ((0,),) + Disconnect From Database FOR ${i} IN RANGE ${1000} Ctn Process Service Check Result host_1 service_${i+1} 2 critical${i} @@ -182,6 +185,7 @@ EBBPS2 IF "${output}" == "((0,),)" BREAK END Should Be Equal As Strings ${output} ((0,),) + Disconnect From Database EBMSSM [Documentation] 1000 services are configured with 100 metrics each. The rrd output is removed from the broker configuration. GetSqlManagerStats is called to measure writes into data_bin. @@ -228,6 +232,7 @@ EBMSSM Sleep 1s END Should Be True ${output[0][0]} >= 100000 + Disconnect From Database EBPS2 [Documentation] 1000 services are configured with 20 metrics each. The rrd output is removed from the broker configuration to avoid to write too many rrd files. While metrics are written in bulk, the database is stopped. This must not crash broker. @@ -390,6 +395,142 @@ metric_mapping ${grep_res} Grep File /tmp/test.log name: metric1 corresponds to metric id Should Not Be Empty ${grep_res} metric name "metric1" not found +EBMSSMDBD + [Documentation] 1000 services are configured with 100 metrics each. + ... The rrd output is removed from the broker configuration. + ... While metrics are written in the database, we stop the database and then restart it. + ... Broker must recover its connection to the database and continue to write metrics. + [Tags] broker engine unified_sql MON-153320 + Ctn Clear Metrics + Ctn Config Engine ${1} ${1} ${1000} + # We want all the services to be passive to avoid parasite checks during our test. 
+ Ctn Set Services Passive ${0} service_.* + Ctn Config Broker central + Ctn Config Broker rrd + Ctn Config Broker module ${1} + Ctn Config BBDO3 1 + Ctn Broker Config Log central core error + Ctn Broker Config Log central tcp error + Ctn Broker Config Log central sql debug + Ctn Config Broker Sql Output central unified_sql + Ctn Config Broker Remove Rrd Output central + Ctn Clear Retention + ${start} Get Current Date + Ctn Start Broker + Ctn Start Engine + + Ctn Wait For Engine To Be Ready ${start} 1 + + ${start} Ctn Get Round Current Date + # Let's wait for one "INSERT INTO data_bin" to appear in stats. + Log To Console Many service checks with 100 metrics each are processed. + FOR ${i} IN RANGE ${1000} + Ctn Process Service Check Result With Metrics host_1 service_${i+1} 1 warning${i} 100 + END + + Log To Console We wait for at least one metric to be written in the database. + # Let's wait for all force checks to be in the storage database. + Connect To Database pymysql ${DBName} ${DBUser} ${DBPass} ${DBHost} ${DBPort} + FOR ${i} IN RANGE ${500} + ${output} Query + ... SELECT COUNT(s.last_check) FROM metrics m LEFT JOIN index_data i ON m.index_id = i.id LEFT JOIN services s ON s.host_id = i.host_id AND s.service_id = i.service_id WHERE metric_name LIKE "metric_%%" AND s.last_check >= ${start} + IF ${output[0][0]} >= 1 BREAK + Sleep 1s + END + Disconnect From Database + + Log To Console Let's start some database manipulation... + ${start} Get Current Date + + FOR ${i} IN RANGE ${3} + Ctn Stop Mysql + Sleep 10s + Ctn Start Mysql + ${content} Create List could not insert data in data_bin + ${result} Ctn Find In Log With Timeout ${centralLog} ${start} ${content} 10 + Log To Console ${result} + END + +EBMSSMPART + [Documentation] 1000 services are configured with 100 metrics each. + ... The rrd output is removed from the broker configuration. + ... The data_bin table is configured with two partitions p1 and p2 such + ... 
that p1 contains old data and p2 contains current data. + ... While metrics are written in the database, we remove the p2 partition. + ... Once the p2 partition is recreated, broker must recover its connection + ... to the database and continue to write metrics. + ... To check that last point, we force a last service check and we check + ... that its metrics are written in the database. + [Tags] broker engine unified_sql MON-153320 + Ctn Clear Metrics + Ctn Config Engine ${1} ${1} ${1000} + # We want all the services to be passive to avoid parasite checks during our test. + Ctn Set Services Passive ${0} service_.* + Ctn Config Broker central + Ctn Config Broker rrd + Ctn Config Broker module ${1} + Ctn Config BBDO3 1 + Ctn Broker Config Log central core error + Ctn Broker Config Log central tcp error + Ctn Broker Config Log central sql trace + Ctn Config Broker Sql Output central unified_sql + Ctn Config Broker Remove Rrd Output central + Ctn Clear Retention + + Ctn Prepare Partitions For Data Bin + ${start} Get Current Date + Ctn Start Broker + Ctn Start Engine + + Ctn Wait For Engine To Be Ready ${start} 1 + + ${start} Ctn Get Round Current Date + # Let's wait for one "INSERT INTO data_bin" to appear in stats. + Log To Console Many service checks with 100 metrics each are processed. + FOR ${i} IN RANGE ${1000} + Ctn Process Service Check Result With Metrics host_1 service_${i+1} 1 warning${i} 100 + END + + Log To Console We wait for at least one metric to be written in the database. + # Let's wait for all force checks to be in the storage database. + Connect To Database pymysql ${DBName} ${DBUser} ${DBPass} ${DBHost} ${DBPort} + FOR ${i} IN RANGE ${500} + ${output} Query + ... 
SELECT COUNT(s.last_check) FROM metrics m LEFT JOIN index_data i ON m.index_id = i.id LEFT JOIN services s ON s.host_id = i.host_id AND s.service_id = i.service_id WHERE metric_name LIKE "metric_%%" AND s.last_check >= ${start} + IF ${output[0][0]} >= 1 BREAK + Sleep 1s + END + Disconnect From Database + + Log To Console Let's start some database manipulation... + Ctn Remove P2 From Data Bin + ${start} Get Current Date + + ${content} Create List errno= + FOR ${i} IN RANGE ${6} + ${result} Ctn Find In Log With Timeout ${centralLog} ${start} ${content} 10 + IF ${result} BREAK + END + + Log To Console Let's recreate the p2 partition... + Ctn Add P2 To Data Bin + + ${start} Ctn Get Round Current Date + Ctn Process Service Check Result With Metrics host_1 service_1 0 Last Output OK 100 + + Log To Console Let's wait for the last service check to be in the database... + Connect To Database pymysql ${DBName} ${DBUser} ${DBPass} ${DBHost} ${DBPort} + FOR ${i} IN RANGE ${120} + ${output} Query SELECT count(*) FROM data_bin WHERE ctime >= ${start} - 10 + Log To Console ${output} + IF ${output[0][0]} >= 100 BREAK + Sleep 1s + END + Log To Console ${output} + Should Be True ${output[0][0]} >= 100 + Disconnect From Database + + *** Keywords *** Ctn Test Clean Ctn Stop Engine diff --git a/tests/resources/Broker.py b/tests/resources/Broker.py index 92b7ea6e11b..36ad5c36249 100755 --- a/tests/resources/Broker.py +++ b/tests/resources/Broker.py @@ -2734,3 +2734,100 @@ def ctn_broker_get_ba(port: int, ba_id: int, output_file: str, timeout=TIMEOUT): except: logger.console("gRPC server not ready") return res + + +def ctn_prepare_partitions_for_data_bin(): + """ + Create two partitions for the data_bin table. + The first one named p1 contains data with ctime older than now - 60. + The second one named p2 contains data with ctime older than now + 3600. 
+ """ + connection = pymysql.connect(host=DB_HOST, + user=DB_USER, + password=DB_PASS, + database=DB_NAME_STORAGE, + charset='utf8mb4', + cursorclass=pymysql.cursors.DictCursor) + + now = int(time.time()) + before = now - 60 + after = now + 3600 + with connection: + with connection.cursor() as cursor: + cursor.execute("DROP TABLE IF EXISTS data_bin") + sql = f"""CREATE TABLE `data_bin` ( + `id_metric` int(11) DEFAULT NULL, + `ctime` int(11) DEFAULT NULL, + `value` float DEFAULT NULL, + `status` enum('0','1','2','3','4') DEFAULT NULL, + KEY `index_metric` (`id_metric`) +) ENGINE=InnoDB DEFAULT CHARSET=latin1 + PARTITION BY RANGE (`ctime`) +(PARTITION `p1` VALUES LESS THAN ({before}) ENGINE = InnoDB, + PARTITION `p2` VALUES LESS THAN ({after}) ENGINE = InnoDB)""" + cursor.execute(sql) + connection.commit() + + +def ctn_remove_p2_from_data_bin(): + """ + Remove the partition p2 from the data_bin table. + """ + connection = pymysql.connect(host=DB_HOST, + user=DB_USER, + password=DB_PASS, + database=DB_NAME_STORAGE, + charset='utf8mb4', + cursorclass=pymysql.cursors.DictCursor) + + with connection: + with connection.cursor() as cursor: + cursor.execute("ALTER TABLE data_bin DROP PARTITION p2") + connection.commit() + + +def ctn_add_p2_to_data_bin(): + """ + Add the partition p2 the the data_bin table. + """ + connection = pymysql.connect(host=DB_HOST, + user=DB_USER, + password=DB_PASS, + database=DB_NAME_STORAGE, + charset='utf8mb4', + cursorclass=pymysql.cursors.DictCursor) + + after = int(time.time()) + 3600 + with connection: + with connection.cursor() as cursor: + cursor.execute( + f"ALTER TABLE data_bin ADD PARTITION (PARTITION p2 VALUES LESS THAN ({after}))") + connection.commit() + + +def ctn_init_data_bin_without_partition(): + """ + Recreate the data_bin table without partition. 
+ """ + connection = pymysql.connect(host=DB_HOST, + user=DB_USER, + password=DB_PASS, + database=DB_NAME_STORAGE, + charset='utf8mb4', + cursorclass=pymysql.cursors.DictCursor) + + now = int(time.time()) + before = now - 60 + after = now + 3600 + with connection: + with connection.cursor() as cursor: + cursor.execute("DROP TABLE IF EXISTS data_bin") + sql = f"""CREATE TABLE `data_bin` ( + `id_metric` int(11) DEFAULT NULL, + `ctime` int(11) DEFAULT NULL, + `value` float DEFAULT NULL, + `status` enum('0','1','2','3','4') DEFAULT NULL, + KEY `index_metric` (`id_metric`) +) ENGINE=InnoDB DEFAULT CHARSET=latin1""" + cursor.execute(sql) + connection.commit() diff --git a/tests/resources/resources.robot b/tests/resources/resources.robot index 259febc2774..1a2f771afaf 100644 --- a/tests/resources/resources.robot +++ b/tests/resources/resources.robot @@ -363,13 +363,14 @@ Ctn Dump Ba On Error Ctn Process Service Result Hard [Arguments] ${host} ${svc} ${state} ${output} - Repeat Keyword - ... 3 times - ... Ctn Process Service Check Result - ... ${host} - ... ${svc} - ... ${state} - ... ${output} + FOR ${idx} IN RANGE 3 + Ctn Process Service Check Result + ... ${host} + ... ${svc} + ... ${state} + ... ${output} + Sleep 1s + END Ctn Wait For Engine To Be Ready [Arguments] ${start} ${nbEngine}=1