diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..9f38116f7 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,402 @@ +name: Build + +on: + workflow_call: + +env: + CACHE_KEY_BASE: 22 # Increment this value to reset the images cache + CACHE_KEY_FILES: ${{ github.workspace }}/docker-definition + CACHE_KEY_IMAGES: BASE_SEED=$CACHE_KEY_BASE | $CACHE_KEY_FILES + IMAGE_CACHE_PATH: ${{ github.workspace }}/image_cache + SENTINEL_CACHE_PATH: ${{ github.workspace }}/sentinel_cache + +jobs: + juniper: + runs-on: ubuntu-latest + timeout-minutes: 180 + env: + DOCKER_IMAGES_SLUG: juniper + steps: + - uses: actions/checkout@v3 + + - name: Docker images sentinel cache + id: cache-sentinel + uses: actions/cache@v3 + with: + path: $SENTINEL_CACHE_PATH + key: SENTINEL=1 | $DOCKER_IMAGES_SLUG | $CACHE_KEY_IMAGES + + - name: Docker images cache + if: steps.cache-sentinel.outputs.cache-hit != 'true' + uses: actions/cache@v3 + with: + path: $IMAGE_CACHE_PATH + key: BASE_SEED=$CACHE_KEY_BASE | $DOCKER_IMAGES_SLUG | $CACHE_KEY_IMAGES + restore-keys: BASE_SEED=$CACHE_KEY_BASE | $DOCKER_IMAGES_SLUG + + - name: Load images + if: steps.cache-sentinel.outputs.cache-hit != 'true' + run: | + [ -d $IMAGE_CACHE_PATH ] || exit 0 + ls -l $IMAGE_CACHE_PATH + docker images + set -euxo pipefail + cat $IMAGE_CACHE_PATH/edx-$DOCKER_IMAGES_SLUG.tar.xz | unxz | docker load + docker images + + - name: Set needs_build pipeline variable + if: steps.cache-sentinel.outputs.cache-hit != 'true' + run: echo "NEEDS_BUILD=true" >> $GITHUB_ENV + + - name: Remove Haskell compiler and cached apt archives to save disk space + if: env.NEEDS_BUILD != 'false' + run: df -h; time sudo rm -rf /var/cache/apt/archives /opt/ghc; df -h + + - name: Prepare Transifex credentials + if: env.NEEDS_BUILD != 'false' + run: | + if [ "$TRANSIFEX_USERNAME" == '$(TRANSIFEX_USERNAME)' ] || [ "$TRANSIFEX_PASSWORD" == '$(TRANSIFEX_PASSWORD)' ]; then + echo "Transifex 
credentials unset. Building without translations."
+            exit 0
+          fi
+          printf '[https://www.transifex.com]\nhostname=https://www.transifex.com\nusername=%s\npassword=%s\n' "${TRANSIFEX_USERNAME}" "${TRANSIFEX_PASSWORD}" > $HOME/.transifexrc
+          # The file is written to $HOME above, so list it there
+          # (was: $GITHUB_WORKSPACE, which made this step fail).
+          ls -l $HOME/.transifexrc
+        env:
+          # GitHub Actions secrets syntax; the Azure-style "$(VAR)" form is a
+          # literal string on GitHub and never carries a real credential.
+          TRANSIFEX_USERNAME: ${{ secrets.TRANSIFEX_USERNAME }}
+          TRANSIFEX_PASSWORD: ${{ secrets.TRANSIFEX_PASSWORD }}
+
+      - name: Setup docker images cache
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          # Exclude untagged images; "grep -v ''" matched every line and left
+          # $images empty. NOTE(review): assumed '<none>' filter — confirm.
+          images=$(docker images|grep -v '<none>'|grep derex|awk '{print $1 ":" $2}')
+          CACHE_FROM_OPTS=""
+          if [ -d $IMAGE_CACHE_PATH ]; then
+            echo "Will use cached layers from images $images"
+            for image in $images; do
+              CACHE_FROM_OPTS="${CACHE_FROM_OPTS} --cache-from=$image"
+            done
+          else
+            mkdir -p $IMAGE_CACHE_PATH
+          fi
+          echo "CACHE_FROM_OPTS=$CACHE_FROM_OPTS" >> $GITHUB_ENV
+          # -p: the directory may already exist when restored from cache.
+          mkdir -p $SENTINEL_CACHE_PATH
+          cp -r $CACHE_KEY_FILES $SENTINEL_CACHE_PATH
+          # We save the sha of the repo that built this image, so that we can push
+          # it only in a build of the same commit, after tests are passed
+          git rev-parse --verify HEAD > $SENTINEL_CACHE_PATH/built_version
+
+      - name: Replace default docker with upstream docker and create build context
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          set -ex
+          sudo apt-get remove moby-cli -y
+          curl -fsSL https://get.docker.com |sudo bash
+          sudo mv /etc/docker/daemon.json /etc/docker/daemon.json.orig
+          sudo cat /etc/docker/daemon.json.orig|jq '.
+ {experimental: true}' |sudo tee /etc/docker/daemon.json + sudo systemctl restart docker.service + docker version + docker buildx create --use + docker images + sudo apt-get install pixz -y + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: $GITHUB_WORKSPACE/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir $GITHUB_WORKSPACE/.cache/pip -U pip setuptools + pip3 install --cache-dir $GITHUB_WORKSPACE/.cache/pip -r requirements_dev.txt -e . + + - name: Build nostatic image + if: env.NEEDS_BUILD != 'false' + run: derex build openedx --docker-opts "${CACHE_FROM_OPTS} --cache-to=type=inline -o type=docker" --target=nostatic $DOCKER_IMAGES_SLUG + + - name: Build dev image + if: env.NEEDS_BUILD != 'false' + run: derex build openedx --docker-opts "--cache-to=type=inline -o type=docker" --target=dev $DOCKER_IMAGES_SLUG + + - name: Check translations + if: env.NEEDS_BUILD != 'false' + run: | + docker images + derex build openedx --docker-opts "${CACHE_FROM_OPTS} --output type=docker,name={docker_image_prefix}-{target}" --target=translations $DOCKER_IMAGES_SLUG + docker images + docker run --rm derex/openedx-$DOCKER_IMAGES_SLUG-translations:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') sh -c "i18n_tool validate || (find conf|grep prob; find conf|grep prob|xargs cat; false)" || echo "Problems with translations found" + + - name: Save images + if: env.NEEDS_BUILD != 'false' + run: | + set -euxo pipefail; + docker save \ + $(derex build openedx --only-print-image-name -t nostatic $DOCKER_IMAGES_SLUG) \ + $(derex build openedx --only-print-image-name -t dev $DOCKER_IMAGES_SLUG) \ + | pixz -0 > $IMAGE_CACHE_PATH/edx-$DOCKER_IMAGES_SLUG.tar.xz + + koa: + runs-on: ubuntu-latest + timeout-minutes: 180 + env: + DOCKER_IMAGES_SLUG: koa + steps: + - 
uses: actions/checkout@v3
+
+      # NOTE(review): actions/cache inputs are GitHub expressions, not shell:
+      # "$VAR" and Azure-style "$(VAR)" are taken literally. Use ${{ env.* }}.
+      - name: Docker images sentinel cache
+        id: cache-sentinel
+        uses: actions/cache@v3
+        with:
+          path: ${{ env.SENTINEL_CACHE_PATH }}
+          key: SENTINEL=1 | ${{ env.DOCKER_IMAGES_SLUG }} | ${{ env.CACHE_KEY_IMAGES }}
+
+      - name: Docker images cache
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        uses: actions/cache@v3
+        with:
+          path: ${{ env.IMAGE_CACHE_PATH }}
+          key: BASE_SEED=${{ env.CACHE_KEY_BASE }} | ${{ env.DOCKER_IMAGES_SLUG }} | ${{ env.CACHE_KEY_IMAGES }}
+          restore-keys: BASE_SEED=${{ env.CACHE_KEY_BASE }} | ${{ env.DOCKER_IMAGES_SLUG }}
+
+      - name: Load images
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        run: |
+          # Nothing to load on a cold cache.
+          [ -d $IMAGE_CACHE_PATH ] || exit 0
+          ls -l $IMAGE_CACHE_PATH
+          docker images
+          set -euxo pipefail
+          cat $IMAGE_CACHE_PATH/edx-$DOCKER_IMAGES_SLUG.tar.xz | unxz | docker load
+          docker images
+
+      - name: Set needs_build pipeline variable
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        run: echo "NEEDS_BUILD=true" >> $GITHUB_ENV
+
+      - name: Remove Haskell compiler and cached apt archives to save disk space
+        if: env.NEEDS_BUILD != 'false'
+        run: df -h; time sudo rm -rf /var/cache/apt/archives /opt/ghc; df -h
+
+      - name: Prepare Transifex credentials
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          # Unset GitHub secrets expand to empty strings; also keep the old
+          # Azure-style literal comparison for backward compatibility.
+          if [ -z "$TRANSIFEX_USERNAME" ] || [ "$TRANSIFEX_USERNAME" == '$(TRANSIFEX_USERNAME)' ] || [ -z "$TRANSIFEX_PASSWORD" ] || [ "$TRANSIFEX_PASSWORD" == '$(TRANSIFEX_PASSWORD)' ]; then
+            echo "Transifex credentials unset. Building without translations."
+            exit 0
+          fi
+          printf '[https://www.transifex.com]\nhostname=https://www.transifex.com\nusername=%s\npassword=%s\n' "${TRANSIFEX_USERNAME}" "${TRANSIFEX_PASSWORD}" > $HOME/.transifexrc
+          # The file is written to $HOME above, so list it there
+          # (was: $GITHUB_WORKSPACE, which made this step fail).
+          ls -l $HOME/.transifexrc
+        env:
+          # GitHub Actions secrets syntax; the Azure-style "$(VAR)" form is a
+          # literal string on GitHub and never carries a real credential.
+          TRANSIFEX_USERNAME: ${{ secrets.TRANSIFEX_USERNAME }}
+          TRANSIFEX_PASSWORD: ${{ secrets.TRANSIFEX_PASSWORD }}
+
+      - name: Setup docker images cache
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          # Exclude untagged images; "grep -v ''" matched every line and left
+          # $images empty. NOTE(review): assumed '<none>' filter — confirm.
+          images=$(docker images|grep -v '<none>'|grep derex|awk '{print $1 ":" $2}')
+          CACHE_FROM_OPTS=""
+          if [ -d $IMAGE_CACHE_PATH ]; then
+            echo "Will use cached layers from images $images"
+            for image in $images; do
+              CACHE_FROM_OPTS="${CACHE_FROM_OPTS} --cache-from=$image"
+            done
+          else
+            mkdir -p $IMAGE_CACHE_PATH
+          fi
+          echo "CACHE_FROM_OPTS=$CACHE_FROM_OPTS" >> $GITHUB_ENV
+          # -p: the directory may already exist when restored from cache.
+          mkdir -p $SENTINEL_CACHE_PATH
+          cp -r $CACHE_KEY_FILES $SENTINEL_CACHE_PATH
+          # We save the sha of the repo that built this image, so that we can push
+          # it only in a build of the same commit, after tests are passed
+          git rev-parse --verify HEAD > $SENTINEL_CACHE_PATH/built_version
+
+      - name: Replace default docker with upstream docker and create build context
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          set -ex
+          sudo apt-get remove moby-cli -y
+          curl -fsSL https://get.docker.com |sudo bash
+          sudo mv /etc/docker/daemon.json /etc/docker/daemon.json.orig
+          sudo cat /etc/docker/daemon.json.orig|jq '.
+ {experimental: true}' |sudo tee /etc/docker/daemon.json + sudo systemctl restart docker.service + docker version + docker buildx create --use + docker images + sudo apt-get install pixz -y + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: $GITHUB_WORKSPACE/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir $GITHUB_WORKSPACE/.cache/pip -U pip setuptools + pip3 install --cache-dir $GITHUB_WORKSPACE/.cache/pip -r requirements_dev.txt -e . + + - name: Build nostatic image + if: env.NEEDS_BUILD != 'false' + run: derex build openedx --docker-opts "${CACHE_FROM_OPTS} --cache-to=type=inline -o type=docker" --target=nostatic $DOCKER_IMAGES_SLUG + + - name: Build dev image + if: env.NEEDS_BUILD != 'false' + run: derex build openedx --docker-opts "--cache-to=type=inline -o type=docker" --target=dev $DOCKER_IMAGES_SLUG + + - name: Check translations + if: env.NEEDS_BUILD != 'false' + run: | + docker images + derex build openedx --docker-opts "${CACHE_FROM_OPTS} --output type=docker,name={docker_image_prefix}-{target}" --target=translations $DOCKER_IMAGES_SLUG + docker images + docker run --rm derex/openedx-$DOCKER_IMAGES_SLUG-translations:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') sh -c "i18n_tool validate || (find conf|grep prob; find conf|grep prob|xargs cat; false)" || echo "Problems with translations found" + + - name: Save images + if: env.NEEDS_BUILD != 'false' + run: | + set -euxo pipefail; + docker save \ + $(derex build openedx --only-print-image-name -t nostatic $DOCKER_IMAGES_SLUG) \ + $(derex build openedx --only-print-image-name -t dev $DOCKER_IMAGES_SLUG) \ + | pixz -0 > $IMAGE_CACHE_PATH/edx-$DOCKER_IMAGES_SLUG.tar.xz + + lilac: + runs-on: ubuntu-latest + timeout-minutes: 180 + env: + DOCKER_IMAGES_SLUG: lilac + steps: + - 
uses: actions/checkout@v3
+
+      # NOTE(review): actions/cache inputs are GitHub expressions, not shell:
+      # "$VAR" and Azure-style "$(VAR)" are taken literally. Use ${{ env.* }}.
+      - name: Docker images sentinel cache
+        id: cache-sentinel
+        uses: actions/cache@v3
+        with:
+          path: ${{ env.SENTINEL_CACHE_PATH }}
+          key: SENTINEL=1 | ${{ env.DOCKER_IMAGES_SLUG }} | ${{ env.CACHE_KEY_IMAGES }}
+
+      - name: Docker images cache
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        uses: actions/cache@v3
+        with:
+          path: ${{ env.IMAGE_CACHE_PATH }}
+          key: BASE_SEED=${{ env.CACHE_KEY_BASE }} | ${{ env.DOCKER_IMAGES_SLUG }} | ${{ env.CACHE_KEY_IMAGES }}
+          restore-keys: BASE_SEED=${{ env.CACHE_KEY_BASE }} | ${{ env.DOCKER_IMAGES_SLUG }}
+
+      - name: Load images
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        run: |
+          # Nothing to load on a cold cache.
+          [ -d $IMAGE_CACHE_PATH ] || exit 0
+          ls -l $IMAGE_CACHE_PATH
+          docker images
+          set -euxo pipefail
+          cat $IMAGE_CACHE_PATH/edx-$DOCKER_IMAGES_SLUG.tar.xz | unxz | docker load
+          docker images
+
+      - name: Set needs_build pipeline variable
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        run: echo "NEEDS_BUILD=true" >> $GITHUB_ENV
+
+      - name: Remove Haskell compiler and cached apt archives to save disk space
+        if: env.NEEDS_BUILD != 'false'
+        run: df -h; time sudo rm -rf /var/cache/apt/archives /opt/ghc; df -h
+
+      - name: Prepare Transifex credentials
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          # Unset GitHub secrets expand to empty strings; also keep the old
+          # Azure-style literal comparison for backward compatibility.
+          if [ -z "$TRANSIFEX_USERNAME" ] || [ "$TRANSIFEX_USERNAME" == '$(TRANSIFEX_USERNAME)' ] || [ -z "$TRANSIFEX_PASSWORD" ] || [ "$TRANSIFEX_PASSWORD" == '$(TRANSIFEX_PASSWORD)' ]; then
+            echo "Transifex credentials unset. Building without translations."
+            exit 0
+          fi
+          printf '[https://www.transifex.com]\nhostname=https://www.transifex.com\nusername=%s\npassword=%s\n' "${TRANSIFEX_USERNAME}" "${TRANSIFEX_PASSWORD}" > $HOME/.transifexrc
+          # The file is written to $HOME above, so list it there
+          # (was: $GITHUB_WORKSPACE, which made this step fail).
+          ls -l $HOME/.transifexrc
+        env:
+          # GitHub Actions secrets syntax; the Azure-style "$(VAR)" form is a
+          # literal string on GitHub and never carries a real credential.
+          TRANSIFEX_USERNAME: ${{ secrets.TRANSIFEX_USERNAME }}
+          TRANSIFEX_PASSWORD: ${{ secrets.TRANSIFEX_PASSWORD }}
+
+      - name: Setup docker images cache
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          # Exclude untagged images; "grep -v ''" matched every line and left
+          # $images empty. NOTE(review): assumed '<none>' filter — confirm.
+          images=$(docker images|grep -v '<none>'|grep derex|awk '{print $1 ":" $2}')
+          CACHE_FROM_OPTS=""
+          if [ -d $IMAGE_CACHE_PATH ]; then
+            echo "Will use cached layers from images $images"
+            for image in $images; do
+              CACHE_FROM_OPTS="${CACHE_FROM_OPTS} --cache-from=$image"
+            done
+          else
+            mkdir -p $IMAGE_CACHE_PATH
+          fi
+          echo "CACHE_FROM_OPTS=$CACHE_FROM_OPTS" >> $GITHUB_ENV
+          # -p: the directory may already exist when restored from cache.
+          mkdir -p $SENTINEL_CACHE_PATH
+          cp -r $CACHE_KEY_FILES $SENTINEL_CACHE_PATH
+          # We save the sha of the repo that built this image, so that we can push
+          # it only in a build of the same commit, after tests are passed
+          git rev-parse --verify HEAD > $SENTINEL_CACHE_PATH/built_version
+
+      - name: Replace default docker with upstream docker and create build context
+        if: env.NEEDS_BUILD != 'false'
+        run: |
+          set -ex
+          sudo apt-get remove moby-cli -y
+          curl -fsSL https://get.docker.com |sudo bash
+          sudo mv /etc/docker/daemon.json /etc/docker/daemon.json.orig
+          sudo cat /etc/docker/daemon.json.orig|jq '.
+ {experimental: true}' |sudo tee /etc/docker/daemon.json + sudo systemctl restart docker.service + docker version + docker buildx create --use + docker images + sudo apt-get install pixz -y + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: $GITHUB_WORKSPACE/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir $GITHUB_WORKSPACE/.cache/pip -U pip setuptools + pip3 install --cache-dir $GITHUB_WORKSPACE/.cache/pip -r requirements_dev.txt -e . + + - name: Build nostatic image + if: env.NEEDS_BUILD != 'false' + run: derex build openedx --docker-opts "${CACHE_FROM_OPTS} --cache-to=type=inline -o type=docker" --target=nostatic $DOCKER_IMAGES_SLUG + + - name: Build dev image + if: env.NEEDS_BUILD != 'false' + run: derex build openedx --docker-opts "--cache-to=type=inline -o type=docker" --target=dev $DOCKER_IMAGES_SLUG + + - name: Check translations + if: env.NEEDS_BUILD != 'false' + run: | + docker images + derex build openedx --docker-opts "${CACHE_FROM_OPTS} --output type=docker,name={docker_image_prefix}-{target}" --target=translations $DOCKER_IMAGES_SLUG + docker images + docker run --rm derex/openedx-$DOCKER_IMAGES_SLUG-translations:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') sh -c "i18n_tool validate || (find conf|grep prob; find conf|grep prob|xargs cat; false)" || echo "Problems with translations found" + + - name: Save images + if: env.NEEDS_BUILD != 'false' + run: | + set -euxo pipefail; + docker save \ + $(derex build openedx --only-print-image-name -t nostatic $DOCKER_IMAGES_SLUG) \ + $(derex build openedx --only-print-image-name -t dev $DOCKER_IMAGES_SLUG) \ + | pixz -0 > $IMAGE_CACHE_PATH/edx-$DOCKER_IMAGES_SLUG.tar.xz diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml new file mode 100644 index 
000000000..9984f3016 --- /dev/null +++ b/.github/workflows/on_push.yml @@ -0,0 +1,19 @@
+name: on_push
+
+on: push
+
+jobs:
+  build:
+    uses: ./.github/workflows/build.yml
+
+  test:
+    uses: ./.github/workflows/test.yml
+    needs: [build]
+
+  push:
+    uses: ./.github/workflows/push.yml
+    needs: [build, test]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
new file mode 100644
index 000000000..b9f67e13b
--- /dev/null
+++ b/.github/workflows/push.yml
@@ -0,0 +1,69 @@
+name: Push
+
+on:
+  workflow_call:
+
+env:
+  CACHE_KEY_BASE: 22 # Increment this value to reset the images cache
+  CACHE_KEY_FILES: ${{ github.workspace }}/docker-definition
+  # NOTE(review): "$CACHE_KEY_BASE" / "$CACHE_KEY_FILES" are NOT expanded by
+  # GitHub Actions inside env definitions; this value is a literal string.
+  # Kept as-is so cache keys still match the ones produced by build.yml.
+  CACHE_KEY_IMAGES: BASE_SEED=$CACHE_KEY_BASE | $CACHE_KEY_FILES
+  IMAGE_CACHE_PATH: ${{ github.workspace }}/image_cache
+  SENTINEL_CACHE_PATH: ${{ github.workspace }}/sentinel_cache
+
+jobs:
+  Push:
+    runs-on: ubuntu-latest
+    timeout-minutes: 20
+    strategy:
+      fail-fast: false
+      matrix:
+        OPENEDX_RELEASE: [juniper, koa, lilac]
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Docker images sentinel cache
+        id: cache-sentinel
+        uses: actions/cache@v3
+        with:
+          # actions/cache inputs are not shell-expanded: "$VAR" would be taken
+          # literally. Use ${{ env.* }} expressions instead.
+          path: ${{ env.SENTINEL_CACHE_PATH }}
+          key: SENTINEL=1 | ${{ matrix.OPENEDX_RELEASE }} | ${{ env.CACHE_KEY_IMAGES }}
+
+      - name: Set needs_push pipeline variable
+        run: |
+          IMAGE="derex/openedx-${{ matrix.OPENEDX_RELEASE }}-dev:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//')"
+          echo Checking presence of ${IMAGE} on docker registry
+          docker manifest inspect ${IMAGE} || { echo "NEEDS_PUSH=true" >> $GITHUB_ENV; echo Image not found: pushing ; }
+
+      - name: Log into docker registry
+        if: env.NEEDS_PUSH != 'false'
+        # --password-stdin keeps the password off the process command line.
+        run: echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin
+        env:
+          # GitHub Actions secrets syntax; the Azure-style "$(VAR)" form is a
+          # literal string on GitHub and would make login always fail.
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Docker images cache
+        if: steps.cache-sentinel.outputs.cache-hit != 'true'
+        uses: actions/cache@v3
+
with: + path: $IMAGE_CACHE_PATH + key: BASE_SEED=$CACHE_KEY_BASE | ${{ matrix.OPENEDX_RELEASE }} | $CACHE_KEY_IMAGES + restore-keys: BASE_SEED=$CACHE_KEY_BASE | ${{ matrix.OPENEDX_RELEASE }} + + - name: Load images + if: steps.cache-sentinel.outputs.cache-hit != 'true' + run: | + [ -d $IMAGE_CACHE_PATH ] || exit 0 + ls -l $IMAGE_CACHE_PATH + docker images + set -euxo pipefail + cat $IMAGE_CACHE_PATH/edx-${{ matrix.OPENEDX_RELEASE }}.tar.xz | unxz | docker load + docker images + + - name: Push dev image + if: env.NEEDS_PUSH != 'false' + run: docker push derex/openedx-${{ matrix.OPENEDX_RELEASE }}-dev:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') + + - name: Push nostatic image + if: env.NEEDS_PUSH != 'false' + run: docker push derex/openedx-${{ matrix.OPENEDX_RELEASE }}-nostatic:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..e9cbe6c09 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,525 @@ +name: Test + +on: + workflow_call: + +env: + CACHE_KEY_BASE: 22 # Increment this value to reset the images cache + CACHE_KEY_FILES: ${{ github.workspace }}/docker-definition + CACHE_KEY_IMAGES: BASE_SEED=$CACHE_KEY_BASE | $CACHE_KEY_FILES + IMAGE_CACHE_PATH: ${{ github.workspace }}/image_cache + SENTINEL_CACHE_PATH: ${{ github.workspace }}/sentinel_cache + PYTEST_ADDOPTS: --cov=derex --cov-report xml --cov-report html --cov-report term --cov-report term-missing --cov-branch --black + CURL: curl --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 -f + +jobs: + CheckDocsAndPreCommit: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - 
name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -U pip setuptools + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -r requirements_dev.txt -e . + + - name: Compile docs + run: make docs + + - name: Check pre-commit hooks + run: pre-commit run -a + + BuildBinaries: + runs-on: ubuntu-latest + timeout-minutes: 20 + needs: [CheckDocsAndPreCommit] + strategy: + fail-fast: false + matrix: + OPENEDX_RELEASE: [juniper, koa, lilac] + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -U pip setuptools + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -r requirements_dev.txt -e . 
+ + - name: Build linux binary with pyinstaller + run: | + pip install pyinstaller + make executable + + - name: Test pyinstaller created linux binary + run: | + set -ex + ./bundle/dist/derex + ./bundle/dist/ddc-services ps + cd examples/${{ matrix.OPENEDX_RELEASE }}/minimal + ../../../bundle/dist/ddc-project config + + - uses: actions/upload-artifact@v2.2.4 + with: + name: LinuxBinary + path: ./bundle/dist/ + continue-on-error: true + + - name: Build macOS binary with pyinstaller + run: | + pip install pyinstaller scrypt + # The Openssl version on MacOS 10.14 does not support scrypt + # so we pip install it and leave a trace to pyinstaller to pick it up + echo -e "\nimport scrypt" >> bundle/executable.py + make executable + + - name: Test pyinstaller created macOS binary + run: | + set -ex + ./bundle/dist/derex --help + ./bundle/dist/ddc-services --help + cd examples/${{ matrix.OPENEDX_RELEASE }}/minimal + ../../../bundle/dist/ddc-project config + + - uses: actions/upload-artifact@v2.2.4 + with: + name: MacOSBinary + path: ./bundle/dist/ + continue-on-error: true + + RunPytests: + runs-on: ubuntu-latest + timeout-minutes: 20 + needs: [CheckDocsAndPreCommit] + strategy: + fail-fast: false + matrix: + OPENEDX_RELEASE: [juniper, koa, lilac] + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -U pip setuptools + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -r requirements_dev.txt -e . 
+ + - name: Run python tests + run: | + set -ex + set -o pipefail + pip3 --cache-dir ${{ github.workspace }}/.cache/pip install scrypt + cd tests + pytest -m "not slowtest" | grep -v codecoveragetool=Cobertura + + - uses: actions/upload-artifact@v2.2.4 + with: + name: fasttests_coverage + path: ${{ github.workspace }}/tests/.coverage + continue-on-error: true + + RunSlowPytests: + runs-on: ubuntu-latest + timeout-minutes: 20 + needs: [CheckDocsAndPreCommit] + strategy: + fail-fast: false + matrix: + OPENEDX_RELEASE: [juniper, koa, lilac] + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -U pip setuptools + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -r requirements_dev.txt -e . 
+ + - name: Docker images cache + if: steps.cache-sentinel.outputs.cache-hit != 'true' + uses: actions/cache@v3 + with: + path: $IMAGE_CACHE_PATH + key: BASE_SEED=$CACHE_KEY_BASE | ${{ matrix.OPENEDX_RELEASE }} | $CACHE_KEY_IMAGES + restore-keys: BASE_SEED=$CACHE_KEY_BASE | ${{ matrix.OPENEDX_RELEASE }} + + - name: Load images + if: steps.cache-sentinel.outputs.cache-hit != 'true' + run: | + [ -d $IMAGE_CACHE_PATH ] || exit 0 + ls -l $IMAGE_CACHE_PATH + docker images + set -euxo pipefail + cat $IMAGE_CACHE_PATH/edx-${{ matrix.OPENEDX_RELEASE }}.tar.xz | unxz | docker load + docker images + + - name: Provision services + run: | + ddc-services config + ddc-services pull + set -ex + export DEREX_ADMIN_SERVICES=False + ddc-services up -d + sleep 15 + derex reset-mailslurper + + - name: Run python tests + run: | + set -ex + set -o pipefail + cd tests + pytest -m "slowtest" | grep -v codecoveragetool=Cobertura + + - uses: actions/upload-artifact@v2.2.4 + with: + name: slowtests_coverage + path: ${{ github.workspace }}/tests/.coverage + continue-on-error: true + + TestProject: + runs-on: ubuntu-latest + timeout-minutes: 30 + needs: [CheckDocsAndPreCommit] + strategy: + fail-fast: false + matrix: + OPENEDX_RELEASE: [juniper, koa, lilac] + PROJECT_TYPE: [minimal, complete] + env: + PROJECT_TYPE: ${{ matrix.PROJECT_TYPE }} + PROJECT_PATH: examples/${{ matrix.OPENEDX_RELEASE }}/${{ matrix.PROJECT_TYPE }} + PROJECT_NAME: ${{ matrix.OPENEDX_RELEASE }}-${{ matrix.PROJECT_TYPE }} + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner as python package + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -U pip setuptools + pip3 install --cache-dir ${{ github.workspace 
}}/.cache/pip . + + - name: Docker images cache + if: steps.cache-sentinel.outputs.cache-hit != 'true' + uses: actions/cache@v3 + with: + path: $IMAGE_CACHE_PATH + key: BASE_SEED=$CACHE_KEY_BASE | ${{ matrix.OPENEDX_RELEASE }} | $CACHE_KEY_IMAGES + restore-keys: BASE_SEED=$CACHE_KEY_BASE | ${{ matrix.OPENEDX_RELEASE }} + + - name: Load images + if: steps.cache-sentinel.outputs.cache-hit != 'true' + run: | + [ -d $IMAGE_CACHE_PATH ] || exit 0 + ls -l $IMAGE_CACHE_PATH + docker images + set -euxo pipefail + cat $IMAGE_CACHE_PATH/edx-${{ matrix.OPENEDX_RELEASE }}.tar.xz | unxz | docker load + docker images + + - name: Provision services + run: | + ddc-services config + ddc-services pull + set -ex + export DEREX_ADMIN_SERVICES=False + ddc-services up -d + sleep 15 + derex reset-mailslurper + + - name: Provision project + run: cd $PROJECT_PATH; ddc-project config + + - name: Build project final image + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: cd $PROJECT_PATH && derex build project + + - name: Prime Mysql DB + run: | + set -ex + cd $PROJECT_PATH + derex mysql reset --force + + - name: Prime MinIO bucket + run: | + set -ex + cd $PROJECT_PATH + derex minio-update-key + derex create-bucket --no-tty + + - name: Prime Rabbitmq + run: | + set -ex + cd $PROJECT_PATH + derex reset-rabbitmq + + - name: Add hosts to /etc/hosts + run: echo 127.0.0.1 localhost studio.$PROJECT_NAME.localhost $PROJECT_NAME.localhost | sudo tee -a /etc/hosts + + - name: Show LMS/CMS logs + run: cd $PROJECT_PATH; ddc-project logs lms cms + + - name: Set production settings + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: cd $PROJECT_PATH && derex settings production + + - name: Compile theme + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: cd $PROJECT_PATH && derex compile-theme + + - name: Test image with dive + run: | + set -ex + cd $PROJECT_PATH + docker images + wget -q https://github.com/wagoodman/dive/releases/download/v0.9.2/dive_0.9.2_linux_amd64.deb + 
DEBIAN_FRONTEND=noninteractive sudo -E apt-get install -y ./dive_0.9.2_linux_amd64.deb + #dive --ci $PROJECT_NAME/openedx-themes + echo "Skipping image analysis" + + - name: Start project services in debug runmode + run: | + set -ex + cd $PROJECT_PATH + ddc-project up -d + sleep 5 # Give it time to start up + + - name: Show logs (debug runmode) + run: | + cd $PROJECT_PATH + ddc-project config + ddc-project logs + + - name: Test project fixtures + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: | + set -ex + cd $PROJECT_PATH + # Run a command to get the current database name. First run it with all output enabled + # so that in case of errors in the pipeline we have info about what went wrong + ddc-project exec -T lms sh -c 'echo '"'"'from django.conf import settings; print(settings.DATABASES["default"]["NAME"])'"'"' |./manage.py lms shell' + # ...and then the actual run + DATABASE_NAME=$(ddc-project exec -T lms sh -c 'echo '"'"'from django.conf import settings; print(settings.DATABASES["default"]["NAME"])'"'"' |./manage.py lms shell 2> /dev/null' 2> /dev/null) + ddc-services exec -T mysql mysql -h localhost --protocol tcp -u root -p$(derex debug print-secret mysql) ${DATABASE_NAME} -e "SELECT * from auth_user WHERE username='derex.runner'"|grep derex.runner + + - name: Curl the LMS (debug runmode) + run: $CURL http://$PROJECT_NAME.localhost + + - name: Curl the LMS example plugin view (debug runmode) + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: $CURL http://$PROJECT_NAME.localhost/example_view + + - name: Curl the CMS (debug runmode) + run: $CURL http://studio.$PROJECT_NAME.localhost + + - name: Start project services in production runmode + run: | + set -ex + cd $PROJECT_PATH + derex runmode production --force + ddc-project up -d + sleep 5 # Give it time to start up + + - name: Show logs (production runmode) + run: | + cd $PROJECT_PATH + ddc-project config + ddc-project logs + + - name: Curl the LMS (production runmode) + run: $CURL 
http://$PROJECT_NAME.localhost/ || (sleep 10; $CURL http://$PROJECT_NAME.localhost/) + + - name: Curl the LMS example plugin view (production runmode) + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: $CURL http://$PROJECT_NAME.localhost/example_view + + - name: Curl the CMS (production runmode) + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: $CURL http://studio.$PROJECT_NAME.localhost/ || (sleep 10; $CURL http://studio.$PROJECT_NAME.localhost/) + + - name: Curl the LMS CSS and make sure our theme CSS is in + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: | + set -ex + $CURL http://$PROJECT_NAME.localhost/|grep static/demo-theme/css/lms-main-v1.css + $CURL http://$PROJECT_NAME.localhost/static/demo-theme/css/lms-main-v1.css | grep this_is_a_customized_theme -q + + - name: Check flower + run: | + set -x + cd $PROJECT_PATH + echo 127.0.0.1 localhost flower.$PROJECT_NAME.localhost | sudo tee -a /etc/hosts + while ! (ddc-project logs cms_worker|grep ready); do sleep 1; done + while ! 
(ddc-project logs lms_worker|grep ready); do sleep 1; done + curl -m10 --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 http://flower.$PROJECT_NAME.localhost/dashboard?json=1|grep celery@openedx.lms + curl -m10 --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 http://flower.$PROJECT_NAME.localhost/dashboard?json=1|grep celery@openedx.cms + + - name: Check celerybeat + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: | + # This test is relying on the djcelery fixtures which should + # have been loaded into the database when priming mysql + set -ex + cd $PROJECT_PATH + CELERYBEAT_RESULT=$(ddc-project logs lms_worker | grep "Scheduler: Sending due task debug" -c) + WORKER_RESULT=$(ddc-project logs lms_worker | grep "pong" -c) + if [ $CELERYBEAT_RESULT -gt 0 ] && [ $WORKER_RESULT -gt 0 ]; then + exit 0 + fi + exit 1 + + - name: Run project e2e tests + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + run: | + set -ex + cd $PROJECT_PATH/e2e && npm ci && cd .. 
+ export HTTP_PROXY=http://127.0.0.1:80 + derex test e2e + + - name: Show services and project logs + run: | + ddc-services logs + cd $PROJECT_PATH; ddc-project logs + + - name: Publish Cypress tests screenshots folder + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + uses: actions/upload-artifact@v2.2.4 + with: + name: $PROJECT_NAME-e2e-screenshots + path: ${{ github.workspace }}/$PROJECT_PATH/e2e/cypress/screenshots + continue-on-error: true + + - name: Publish Cypress tests videos folder + if: ${{ matrix.PROJECT_TYPE == 'complete' }} + uses: actions/upload-artifact@v2.2.4 + with: + name: $PROJECT_NAME-e2e-videos + path: ${{ github.workspace }}/$PROJECT_PATH/e2e/cypress/videos + continue-on-error: true + + CombineCoverage: + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [RunPytests, RunSlowPytests] + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.cache/pip + key: pipcache | requirements_dev.txt + restore-keys: pipcache + + - name: Install derex.runner + if: env.NEEDS_BUILD != 'false' + run: | + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -U pip setuptools + pip3 install --cache-dir ${{ github.workspace }}/.cache/pip -r requirements_dev.txt -e . + + - uses: actions/download-artifact@v3 + with: + name: fasttests_coverage + + - uses: actions/download-artifact@v3 + with: + name: slowtests_coverage + + - name: Cache npm packages + uses: actions/cache@v3 + with: + path: $GITHUB_WORKSPACE/.cache/npm + key: npmcache2 + + - name: Fix coverage result + if: always() + run: | + set -ex + cp ${{ github.workspace }}/.coverage tests/.coverage.slow + cp ${{ github.workspace }}/.coverage tests/.coverage.fast + cd tests + coverage combine + coverage html + coverage xml + cd .. + # We installed the package with pip, and coverage reports the full absolute path. 
+          # We cut to derex/runner/etc/etc
+          DEREX_RUNNER_PATH=`cd tests;python -c "from pathlib import Path; import derex.runner;print(Path(derex.runner.__file__).parent.parent.parent)"`
+          echo Replacing ${DEREX_RUNNER_PATH} in tests/htmlcov/*.html
+          sudo npm config set cache ${{ github.workspace }}/.cache/npm --global
+          npm install juice
+          # Azure pipelines strips style sheets but leaves styles in place.
+          # juice can embed the styles in the HTML for us and present a much better
+          # view in the coverage results tab.
+          # `npm bin` was removed in npm 9 (shipped on current ubuntu-latest
+          # runners), so resolve the locally installed binary explicitly.
+          for filename in tests/htmlcov/*.html; do ./node_modules/.bin/juice $filename $filename; done
+          cp tests/coverage.xml ${{ github.workspace }}
+
+      - name: Code coverage summary report
+        uses: irongut/CodeCoverageSummary@v1.3.0
+        with:
+          filename: coverage.xml
diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index c0fec5a47..000000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,29 +0,0 @@ -schedules: - - cron: "30 06 * * *" - displayName: Daily build - branches: - include: - - master - always: true - -stages: - - template: azure-pipelines/build.yml - - template: azure-pipelines/test.yml - - template: azure-pipelines/push.yml - -variables: - - name: CACHE_KEY_BASE # Increment this value to reset the images cache - value: 22 - - name: CACHE_KEY_FILES - value: $(System.DefaultWorkingDirectory)/docker-definition/**/* - - name: CACHE_KEY_IMAGES - value: BASE_SEED=$(CACHE_KEY_BASE) | $(CACHE_KEY_FILES) - - name: IMAGE_CACHE_PATH - value: $(Pipeline.Workspace)/image_cache - - name: SENTINEL_CACHE_PATH - value: $(Pipeline.Workspace)/sentinel_cache - - name: UBUNTU_VERSION - value: ubuntu-latest - - - name: needs_build - value: "false" diff --git a/azure-pipelines/build.yml b/azure-pipelines/build.yml deleted file mode 100644 index 606b7e92d..000000000 --- a/azure-pipelines/build.yml +++ /dev/null @@ -1,37 +0,0 @@ -stages: - - stage: Build - variables: - - group: Transifex credentials - - jobs: - - job: juniper - timeoutInMinutes: 180 - pool: -
vmImage: $(UBUNTU_VERSION) - continueOnError: true # bad translations currently cause errors - variables: - - name: DOCKER_IMAGES_SLUG - value: juniper - steps: - - template: build_single_version.yml - - - job: koa - timeoutInMinutes: 180 - pool: - vmImage: $(UBUNTU_VERSION) - continueOnError: true # bad translations currently cause errors - variables: - - name: DOCKER_IMAGES_SLUG - value: koa - steps: - - template: build_single_version.yml - - job: lilac - timeoutInMinutes: 180 - pool: - vmImage: $(UBUNTU_VERSION) - - variables: - - name: DOCKER_IMAGES_SLUG - value: lilac - steps: - - template: build_single_version.yml diff --git a/azure-pipelines/build_single_version.yml b/azure-pipelines/build_single_version.yml deleted file mode 100644 index 0b682e46f..000000000 --- a/azure-pipelines/build_single_version.yml +++ /dev/null @@ -1,102 +0,0 @@ -steps: - # Sentinel cache: if this one hits, it means the image cache will hit too - # We don't use the image cache for performance reasons - - task: Cache@2 - inputs: - key: SENTINEL=1 | $(DOCKER_IMAGES_SLUG) | $(CACHE_KEY_IMAGES) - path: $(SENTINEL_CACHE_PATH) - cacheHitVar: "ImagesSentinelHit" - displayName: "Docker images sentinel cache" - - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: $(DOCKER_IMAGES_SLUG) - CONDITION: ne(variables.ImagesSentinelHit, 'true') - - - script: echo '##vso[task.setvariable variable=needs_build]true' - condition: ne(variables.ImagesSentinelHit, 'true') - displayName: "Set needs_build pipeline variable" - - - script: df -h; time sudo rm -rf /var/cache/apt/archives /opt/ghc; df -h - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: "Remove Haskell compiler and cached apt archives to save disk space" - - - script: | - if [ "$TRANSIFEX_USERNAME" == '$(TRANSIFEX_USERNAME)' ] || [ "$TRANSIFEX_PASSWORD" == '$(TRANSIFEX_PASSWORD)' ]; then - echo "Transifex credentials unset. Building without translations." 
- exit 0 - fi - printf '[https://www.transifex.com]\nhostname=https://www.transifex.com\nusername=%s\npassword=%s\n' "${TRANSIFEX_USERNAME}" "${TRANSIFEX_PASSWORD}" > $HOME/.transifexrc - ls -l $HOME/.transifexrc - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: "Prepare Transifex credentials" - env: - TRANSIFEX_USERNAME: $(TRANSIFEX_USERNAME) - TRANSIFEX_PASSWORD: $(TRANSIFEX_PASSWORD) - - - script: | - images=$(docker images|grep -v ''|grep derex|awk '{print $1 ":" $2}') - CACHE_FROM_OPTS="" - if [ -d "$(IMAGE_CACHE_PATH)" ]; then - echo "Will use cached layers from images $images" - for image in $images; do - CACHE_FROM_OPTS="${CACHE_FROM_OPTS} --cache-from=$image" - done - else - mkdir $(IMAGE_CACHE_PATH) - fi - echo "##vso[task.setvariable variable=CACHE_FROM_OPTS]${CACHE_FROM_OPTS}" - mkdir $(SENTINEL_CACHE_PATH) - cp -r $(CACHE_KEY_FILES) $(SENTINEL_CACHE_PATH) - # We save the sha of the repo that built this image, so that we can push - # it only in a build of the same commit, after tests are passed - git rev-parse --verify HEAD > $(SENTINEL_CACHE_PATH)/built_version - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: "Setup docker images cache" - - - script: | - set -ex - sudo apt-get remove moby-cli -y - curl -fsSL https://get.docker.com |sudo bash - sudo mv /etc/docker/daemon.json /etc/docker/daemon.json.orig - sudo cat /etc/docker/daemon.json.orig|jq '. 
+ {experimental: true}' |sudo tee /etc/docker/daemon.json - sudo systemctl restart docker.service - docker version - docker buildx create --use - docker images - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: Replace Microsoft docker with upsream docker and create build context - - - script: sudo apt-get install pixz -y - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: Install pixz - - - template: prepare.yml - parameters: - CONDITION: "ne(variables.needs_build, 'false')" - - - script: derex build openedx --docker-opts "${CACHE_FROM_OPTS} --cache-to=type=inline -o type=docker" --target=nostatic $(DOCKER_IMAGES_SLUG) - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: Build nostatic image - - - script: derex build openedx --docker-opts "--cache-to=type=inline -o type=docker" --target=dev $(DOCKER_IMAGES_SLUG) - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: Build dev image - - - script: | - docker images - derex build openedx --docker-opts "${CACHE_FROM_OPTS} --output type=docker,name={docker_image_prefix}-{target}" --target=translations $(DOCKER_IMAGES_SLUG) - docker images - docker run --rm derex/openedx-${DOCKER_IMAGES_SLUG}-translations:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') sh -c "i18n_tool validate || (find conf|grep prob; find conf|grep prob|xargs cat; false)" || - echo "##vso[task.logissue type=error]Problems with translations found" - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: Check translations - - - script: | - set -euxo pipefail; - docker save \ - $(derex build openedx --only-print-image-name -t nostatic $(DOCKER_IMAGES_SLUG)) \ - $(derex build openedx --only-print-image-name -t dev $(DOCKER_IMAGES_SLUG)) \ - | pixz -0 > $(IMAGE_CACHE_PATH)/edx-$(DOCKER_IMAGES_SLUG).tar.xz - condition: and(succeeded(), ne(variables.needs_build, 'false')) - displayName: Save images 
diff --git a/azure-pipelines/load_images.yml b/azure-pipelines/load_images.yml deleted file mode 100644 index b98e9a773..000000000 --- a/azure-pipelines/load_images.yml +++ /dev/null @@ -1,26 +0,0 @@ -parameters: - - name: DOCKER_IMAGES_SLUG - type: string - - name: CONDITION - type: string - default: "true" - -steps: - - task: Cache@2 - inputs: - key: BASE_SEED=$(CACHE_KEY_BASE) | ${{ parameters.DOCKER_IMAGES_SLUG }} | $(CACHE_KEY_IMAGES) - path: $(IMAGE_CACHE_PATH) - cacheHitVar: "ImagesHit" - restoreKeys: BASE_SEED=$(CACHE_KEY_BASE) | ${{ parameters.DOCKER_IMAGES_SLUG }} - condition: ${{ parameters.CONDITION }} - displayName: "Docker images cache" - - - script: | - [ -d $(IMAGE_CACHE_PATH) ] || exit 0 - ls -l $(IMAGE_CACHE_PATH) - docker images - set -euxo pipefail - cat $(IMAGE_CACHE_PATH)/edx-${{ parameters.DOCKER_IMAGES_SLUG }}.tar.xz | unxz | docker load - docker images - condition: ${{ parameters.CONDITION }} - displayName: "Load ${{ parameters.DOCKER_IMAGES_SLUG }} images" diff --git a/azure-pipelines/prepare.yml b/azure-pipelines/prepare.yml deleted file mode 100644 index 53054366d..000000000 --- a/azure-pipelines/prepare.yml +++ /dev/null @@ -1,35 +0,0 @@ -parameters: - - name: CONDITION - type: string - default: "true" - - name: AS_PYTHON_PACKAGE - type: boolean - default: False - -steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: "3.8" - condition: ${{ parameters.CONDITION }} - displayName: "Use Python 3.8" - - - task: Cache@2 - inputs: - key: pipcache | requirements_dev.txt - path: $(Pipeline.Workspace)/.cache/pip - restoreKeys: | - pipcache - condition: ${{ parameters.CONDITION }} - displayName: Cache pip packages - - - script: | - pip3 install --cache-dir $(Pipeline.Workspace)/.cache/pip -U pip setuptools - pip3 install --cache-dir $(Pipeline.Workspace)/.cache/pip -r requirements_dev.txt -e . 
git+https://github.com/tonybaloney/pytest-azurepipelines.git - condition: and(${{ parameters.CONDITION }}, eq(${{ parameters.AS_PYTHON_PACKAGE }}, False)) - displayName: "Install derex.runner" - - - script: | - pip3 install --cache-dir $(Pipeline.Workspace)/.cache/pip -U pip setuptools - pip3 install --cache-dir $(Pipeline.Workspace)/.cache/pip . git+https://github.com/tonybaloney/pytest-azurepipelines.git - condition: and(${{ parameters.CONDITION }}, eq(${{ parameters.AS_PYTHON_PACKAGE }}, True)) - displayName: "Install derex.runner as python package" diff --git a/azure-pipelines/provision_project.yml b/azure-pipelines/provision_project.yml deleted file mode 100644 index 34a01a87e..000000000 --- a/azure-pipelines/provision_project.yml +++ /dev/null @@ -1,72 +0,0 @@ -parameters: - - name: PROJECT_TYPE - type: string - - name: PROJECT_PATH - type: string - - name: PROJECT_NAME - type: string - -steps: - - script: | - cd ${{ parameters.PROJECT_PATH }}; ddc-project config - condition: always() - displayName: "Show ddc-project config" - - - script: cd ${{ parameters.PROJECT_PATH }} && derex build project - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: Build project final image - - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - derex mysql reset --force - displayName: "Prime Mysql DB" - - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - derex minio-update-key - derex create-bucket --no-tty - displayName: "Prime MinIO bucket" - - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - derex reset-rabbitmq - displayName: "Prime Rabbitmq" - - - script: echo 127.0.0.1 localhost studio.${{ parameters.PROJECT_NAME }}.localhost ${{ parameters.PROJECT_NAME }}.localhost | sudo tee -a /etc/hosts - displayName: Add studio.${{ parameters.PROJECT_NAME }}.localhost and ${{ parameters.PROJECT_NAME }}.localhost to /etc/hosts - - - script: cd ${{ parameters.PROJECT_PATH }}; ddc-project logs lms cms - condition: always() - 
displayName: Show LMS/CMS logs - - - script: cd ${{ parameters.PROJECT_PATH }} && derex settings production - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: Set production settings - - - script: cd ${{ parameters.PROJECT_PATH }} && derex compile-theme - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: Compile theme - - # XXX: Work to have an efficient docker image and re-enable dive checks - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - docker images - - # This should not be needed, but for some reason Azure does not tag the image - docker tag $(docker images -q|head -n1) ${{ parameters.PROJECT_NAME }}/openedx-themes - docker images - - echo Installing dive - wget -q https://github.com/wagoodman/dive/releases/download/v0.9.2/dive_0.9.2_linux_amd64.deb - DEBIAN_FRONTEND=noninteractive sudo -E apt-get install -y ./dive_0.9.2_linux_amd64.deb - - echo Analyzing image - # dive --ci ${{ parameters.PROJECT_NAME }}/openedx-themes - echo "Skipping image analysis" - condition: always() - displayName: Test the ${{ parameters.PROJECT_NAME }} image with dive - timeoutInMinutes: 40 diff --git a/azure-pipelines/provision_services.yml b/azure-pipelines/provision_services.yml deleted file mode 100644 index 0748a7ca7..000000000 --- a/azure-pipelines/provision_services.yml +++ /dev/null @@ -1,20 +0,0 @@ -steps: - - script: ddc-services config - condition: always() - displayName: "Show ddc-services config" - - - script: ddc-services pull - displayName: "Pull edX services images" - - - script: | - set -ex - export DEREX_ADMIN_SERVICES=False - ddc-services up -d - # Give it some seconds to boot - sleep 15 - displayName: "Start services docker containers" - - - script: | - set -ex - derex reset-mailslurper - displayName: "Prime Mailslurper database" diff --git a/azure-pipelines/push.yml b/azure-pipelines/push.yml deleted file mode 100644 index 3ca2a2101..000000000 --- a/azure-pipelines/push.yml +++ /dev/null @@ -1,33 
+0,0 @@ -stages: - - stage: Push - # Check if pushing is needed only on `master` and `build` branches - condition: and(succeeded(), or(eq(variables['Build.SourceBranch'], 'refs/heads/master'), eq(variables['Build.SourceBranch'], 'refs/heads/build') )) - variables: - - group: Docker credentials - jobs: - - job: juniper - pool: - vmImage: $(UBUNTU_VERSION) - variables: - - name: DOCKER_IMAGES_SLUG - value: juniper - steps: - - template: push_single_version.yml - - - job: koa - pool: - vmImage: $(UBUNTU_VERSION) - variables: - - name: DOCKER_IMAGES_SLUG - value: koa - steps: - - template: push_single_version.yml - - - job: lilac - pool: - vmImage: $(UBUNTU_VERSION) - variables: - - name: DOCKER_IMAGES_SLUG - value: lilac - steps: - - template: push_single_version.yml diff --git a/azure-pipelines/push_single_version.yml b/azure-pipelines/push_single_version.yml deleted file mode 100644 index 4fc08c74c..000000000 --- a/azure-pipelines/push_single_version.yml +++ /dev/null @@ -1,33 +0,0 @@ -steps: - - task: Cache@2 - inputs: - key: SENTINEL=1 | $(DOCKER_IMAGES_SLUG) | $(CACHE_KEY_IMAGES) - path: $(SENTINEL_CACHE_PATH) - cacheHitVar: "ImagesSentinelHit" - displayName: "Docker images sentinel cache" - - - script: | - IMAGE="derex/openedx-$(DOCKER_IMAGES_SLUG)-dev:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//')" - echo Checking presence of ${IMAGE} on docker registry - docker manifest inspect ${IMAGE} || { echo '##vso[task.setvariable variable=needs_push]true'; echo Image not found: pushing ; } - displayName: "Set needs_push pipeline variable" - - - script: docker login -u ${DOCKER_USERNAME} -p ${DOCKER_PASSWORD} - displayName: "Log into docker registry" - condition: ne(variables.needs_push, 'false') - env: - DOCKER_USERNAME: $(DOCKER_USERNAME) - DOCKER_PASSWORD: $(DOCKER_PASSWORD) - - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: $(DOCKER_IMAGES_SLUG) - CONDITION: "ne(variables.needs_push, 'false')" - - - script: docker push 
derex/openedx-$(DOCKER_IMAGES_SLUG)-dev:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') - condition: ne(variables.needs_push, 'false') - displayName: "Push $(DOCKER_IMAGES_SLUG) dev image" - - - script: docker push derex/openedx-$(DOCKER_IMAGES_SLUG)-nostatic:$(grep __version__ derex/runner/__init__.py |sed 's/[^"]*"//;s/"//') - condition: ne(variables.needs_push, 'false') - displayName: "Push $(DOCKER_IMAGES_SLUG) nostatic image" diff --git a/azure-pipelines/run_project_tests.yml b/azure-pipelines/run_project_tests.yml deleted file mode 100644 index 8184dfb52..000000000 --- a/azure-pipelines/run_project_tests.yml +++ /dev/null @@ -1,129 +0,0 @@ -parameters: - - name: PROJECT_TYPE - type: string - - name: PROJECT_PATH - type: string - - name: PROJECT_NAME - type: string - -steps: - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - ddc-project up -d - sleep 5 # Give it time to start up - displayName: "Start project services in debug runmode" - - - script: | - cd ${{ parameters.PROJECT_PATH }} - ddc-project config - ddc-project logs - condition: always() - displayName: "Show logs (debug runmode)" - - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - # Run a command to get the current database name. 
First run it with all output enabled - # so that in case of errors in the pipeline we have info about what went wrong - ddc-project exec -T lms sh -c 'echo '"'"'from django.conf import settings; print(settings.DATABASES["default"]["NAME"])'"'"' |./manage.py lms shell' - # ...and then the actual run - DATABASE_NAME=$(ddc-project exec -T lms sh -c 'echo '"'"'from django.conf import settings; print(settings.DATABASES["default"]["NAME"])'"'"' |./manage.py lms shell 2> /dev/null' 2> /dev/null) - ddc-services exec -T mysql mysql -h localhost --protocol tcp -u root -p$(derex debug print-secret mysql) ${DATABASE_NAME} -e "SELECT * from auth_user WHERE username='derex.runner'"|grep derex.runner - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: "Test ${{ parameters.PROJECT_NAME }} project fixtures" - - - script: $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost - displayName: Curl the LMS (debug runmode) - - - script: $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost/example_view - displayName: Curl the LMS example plugin view (debug runmode) - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - - - script: $(CURL) http://studio.${{ parameters.PROJECT_NAME }}.localhost - displayName: Curl the CMS (debug runmode) - - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }} - derex runmode production --force - ddc-project up -d - sleep 5 # Give it time to start up - displayName: "Start project services in production runmode" - - - script: | - cd ${{ parameters.PROJECT_PATH }} - ddc-project config - ddc-project logs - condition: always() - displayName: "Show logs (production runmode)" - - - script: | - $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost/ || (sleep 10; $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost/) - displayName: "Curl the LMS (production runmode)" - - - script: $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost/example_view - displayName: "Curl the LMS example plugin view (production 
runmode)" - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - - - script: $(CURL) http://studio.${{ parameters.PROJECT_NAME }}.localhost/ || (sleep 10; $(CURL) http://studio.${{ parameters.PROJECT_NAME }}.localhost/) - displayName: "Curl the CMS (production runmode)" - - - script: | - set -ex - $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost/|grep static/demo-theme/css/lms-main-v1.css - $(CURL) http://${{ parameters.PROJECT_NAME }}.localhost/static/demo-theme/css/lms-main-v1.css | grep this_is_a_customized_theme -q - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: "Curl the LMS CSS and make sure our theme CSS is in" - - - script: | - set -x - cd ${{ parameters.PROJECT_PATH }} - echo 127.0.0.1 localhost flower.${{ parameters.PROJECT_NAME }}.localhost | sudo tee -a /etc/hosts - while ! (ddc-project logs cms_worker|grep ready); do sleep 1; done - while ! (ddc-project logs lms_worker|grep ready); do sleep 1; done - curl -m10 --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 http://flower.${{ parameters.PROJECT_NAME }}.localhost/dashboard?json=1|grep celery@openedx.lms - curl -m10 --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 http://flower.${{ parameters.PROJECT_NAME }}.localhost/dashboard?json=1|grep celery@openedx.cms - displayName: "Check flower" - - - script: | - # This test is relying on the djcelery fixtures which should - # have been loaded into the database when priming mysql - set -ex - cd ${{ parameters.PROJECT_PATH }} - CELERYBEAT_RESULT=$(ddc-project logs lms_worker | grep "Scheduler: Sending due task debug" -c) - WORKER_RESULT=$(ddc-project logs lms_worker | grep "pong" -c) - if [ $CELERYBEAT_RESULT -gt 0 ] && [ $WORKER_RESULT -gt 0 ]; then - exit 0 - fi - exit 1 - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: "Check celerybeat" - - - script: | - set -ex - cd ${{ parameters.PROJECT_PATH }}/e2e && npm ci && cd .. 
- export HTTP_PROXY=http://127.0.0.1:80 - derex test e2e - condition: eq('${{ parameters.PROJECT_TYPE }}', 'complete') - displayName: "Run project e2e tests" - - - script: | - ddc-services logs - cd ${{ parameters.PROJECT_PATH }}; ddc-project logs - condition: always() - displayName: "Show services and project logs" - - - task: PublishBuildArtifacts@1 - inputs: - pathToPublish: $(System.DefaultWorkingDirectory)/${{ parameters.PROJECT_PATH }}/e2e/cypress/screenshots - artifactName: ${{ parameters.PROJECT_NAME }}-e2e-screenshots - condition: and(always(), eq('${{ parameters.PROJECT_TYPE }}', 'complete')) - displayName: "Publish Cypress tests screenshots folder" - - - task: PublishBuildArtifacts@1 - inputs: - pathToPublish: $(System.DefaultWorkingDirectory)/${{ parameters.PROJECT_PATH }}/e2e/cypress/videos - artifactName: ${{ parameters.PROJECT_NAME }}-e2e-videos - condition: and(always(), eq('${{ parameters.PROJECT_TYPE }}', 'complete')) - displayName: "Publish Cypress tests videos folder" diff --git a/azure-pipelines/test.yml b/azure-pipelines/test.yml deleted file mode 100644 index 88af487dc..000000000 --- a/azure-pipelines/test.yml +++ /dev/null @@ -1,233 +0,0 @@ -stages: - - stage: Test - variables: - PYTEST_ADDOPTS: --cov=derex --cov-report xml --cov-report html --cov-report term --cov-report term-missing --cov-branch --black - jobs: - - job: PyInstallerBuildLinux - pool: - vmImage: $(UBUNTU_VERSION) - steps: - - template: prepare.yml - - template: test_pyinstaller.yml - parameters: - OS: ubuntu - OPENEDX_RELEASE: juniper - - template: test_pyinstaller.yml - parameters: - OS: ubuntu - OPENEDX_RELEASE: koa - - template: test_pyinstaller.yml - parameters: - OS: ubuntu - OPENEDX_RELEASE: lilac - - - job: PyInstallerBuildMacOs - pool: - vmImage: "macOS-10.15" - steps: - - template: prepare.yml - - template: test_pyinstaller.yml - parameters: - OS: macos - OPENEDX_RELEASE: juniper - - template: test_pyinstaller.yml - parameters: - OS: macos - OPENEDX_RELEASE: koa - - 
template: test_pyinstaller.yml - parameters: - OS: macos - OPENEDX_RELEASE: lilac - - - job: RunPytests - timeoutInMinutes: 40 - pool: - vmImage: $(UBUNTU_VERSION) - steps: - - template: prepare.yml - - - script: | - set -ex - set -o pipefail - pip3 --cache-dir $(Pipeline.Workspace)/.cache/pip install scrypt - cd tests - pytest -m "not slowtest" | grep -v codecoveragetool=Cobertura - displayName: "Run python tests" - - - publish: $(System.DefaultWorkingDirectory)/tests/.coverage - artifact: fasttests_coverage - - - job: RunSlowPytests - timeoutInMinutes: 40 - pool: - vmImage: $(UBUNTU_VERSION) - steps: - - template: prepare.yml - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: juniper - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: koa - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: lilac - - template: provision_services.yml - - - script: | - set -ex - set -o pipefail - cd tests - pytest -m "slowtest" | grep -v codecoveragetool=Cobertura - displayName: "Run python tests" - - - publish: $(System.DefaultWorkingDirectory)/tests/.coverage - artifact: slowtests_coverage - - - job: CombineCoverage - dependsOn: - - RunPytests - - RunSlowPytests - pool: - vmImage: $(UBUNTU_VERSION) - steps: - - template: prepare.yml - - - download: current - - - task: Cache@2 - inputs: - key: npmcache2 - path: $(Pipeline.Workspace)/.cache/npm - displayName: Cache npm packages - - - script: | - set -ex - cp $(Pipeline.Workspace)/slowtests_coverage/.coverage tests/.coverage.slow - cp $(Pipeline.Workspace)/fasttests_coverage/.coverage tests/.coverage.fast - cd tests - coverage combine - coverage html - coverage xml - cd .. - # We installed the package with pip, and coverage reports the full absolute path. 
- # We cut to derex/runner/etc/etc - DEREX_RUNNER_PATH=`cd tests;python -c "from pathlib import Path; import derex.runner;print(Path(derex.runner.__file__).parent.parent.parent)"` - echo Replacing ${DEREX_RUNNER_PATH} in tests/htmlcov/*.html - sudo npm config set cache $(Pipeline.Workspace)/.cache/npm --global - npm install juice - # Azure pipelines strips style sheets but leaves styles in place. - # juice can embed the styles in the HTML for us and present a much better - # view in the coverage results tab. - for filename in tests/htmlcov/*.html; do $(npm bin)/juice $filename $filename; done - echo "##vso[codecoverage.publish codecoveragetool=Cobertura;summaryfile=${PWD}/tests/coverage.xml;reportdirectory=${PWD}/tests/htmlcov;]" - displayName: Fix coverage result and publish it - condition: succeededOrFailed() - - - job: TestJuniperMinimalProject - timeoutInMinutes: 40 - pool: - vmImage: $(UBUNTU_VERSION) - variables: - CURL: curl --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 -f - steps: - - template: prepare.yml - parameters: - AS_PYTHON_PACKAGE: True - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: juniper - - template: test_project.yml - parameters: - OPENEDX_RELEASE: juniper - PROJECT_TYPE: minimal - - - job: TestJuniperCompleteProject - timeoutInMinutes: 60 - pool: - vmImage: $(UBUNTU_VERSION) - variables: - CURL: curl --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 -f - steps: - - template: prepare.yml - parameters: - AS_PYTHON_PACKAGE: True - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: juniper - - template: test_project.yml - parameters: - OPENEDX_RELEASE: juniper - PROJECT_TYPE: complete - - - job: TestKoaMinimalProject - timeoutInMinutes: 40 - pool: - vmImage: $(UBUNTU_VERSION) - variables: - CURL: curl --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 -f - steps: - - template: prepare.yml - parameters: - AS_PYTHON_PACKAGE: True - - template: load_images.yml - 
parameters: - DOCKER_IMAGES_SLUG: koa - - template: test_project.yml - parameters: - OPENEDX_RELEASE: koa - PROJECT_TYPE: minimal - - - job: TestKoaCompleteProject - timeoutInMinutes: 60 - pool: - vmImage: $(UBUNTU_VERSION) - variables: - CURL: curl --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 -f - steps: - - template: prepare.yml - parameters: - AS_PYTHON_PACKAGE: True - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: koa - - template: test_project.yml - parameters: - OPENEDX_RELEASE: koa - PROJECT_TYPE: complete - - - job: TestLilacMinimalProject - timeoutInMinutes: 40 - pool: - vmImage: $(UBUNTU_VERSION) - variables: - CURL: curl --retry-connrefused --connect-timeout 30 --retry 5 --retry-delay 5 -f - steps: - - template: prepare.yml - parameters: - AS_PYTHON_PACKAGE: True - - template: load_images.yml - parameters: - DOCKER_IMAGES_SLUG: lilac - - template: test_project.yml - parameters: - OPENEDX_RELEASE: lilac - PROJECT_TYPE: minimal - - - job: CheckDocs - timeoutInMinutes: 5 - pool: - vmImage: $(UBUNTU_VERSION) - steps: - - template: prepare.yml - - script: make docs - displayName: "Compile docs" - - - job: CheckPreCommit - timeoutInMinutes: 5 - pool: - vmImage: $(UBUNTU_VERSION) - steps: - - template: prepare.yml - - script: pre-commit run -a - displayName: "Check pre commit hooks" diff --git a/azure-pipelines/test_project.yml b/azure-pipelines/test_project.yml deleted file mode 100644 index c5656f2b0..000000000 --- a/azure-pipelines/test_project.yml +++ /dev/null @@ -1,18 +0,0 @@ -parameters: - - name: OPENEDX_RELEASE - type: string - - name: PROJECT_TYPE - type: string - -steps: - - template: provision_services.yml - - template: provision_project.yml - parameters: - PROJECT_TYPE: ${{ parameters.PROJECT_TYPE }} - PROJECT_PATH: ${{ format('examples/{0}/{1}', parameters.OPENEDX_RELEASE, parameters.PROJECT_TYPE) }} - PROJECT_NAME: ${{ format('{0}-{1}', parameters.OPENEDX_RELEASE, parameters.PROJECT_TYPE) }} - - template: 
run_project_tests.yml - parameters: - PROJECT_TYPE: ${{ parameters.PROJECT_TYPE }} - PROJECT_PATH: ${{ format('examples/{0}/{1}', parameters.OPENEDX_RELEASE, parameters.PROJECT_TYPE) }} - PROJECT_NAME: ${{ format('{0}-{1}', parameters.OPENEDX_RELEASE, parameters.PROJECT_TYPE) }} diff --git a/azure-pipelines/test_pyinstaller.yml b/azure-pipelines/test_pyinstaller.yml deleted file mode 100644 index 18b6f94fa..000000000 --- a/azure-pipelines/test_pyinstaller.yml +++ /dev/null @@ -1,50 +0,0 @@ -parameters: - - name: OS - type: string - - name: OPENEDX_RELEASE - type: string - -steps: - - ${{ if eq(parameters.OS, 'ubuntu') }}: - - script: | - pip install pyinstaller - make executable - displayName: "Build binary with pyinstaller" - - - script: | - set -ex - ./bundle/dist/derex - ./bundle/dist/ddc-services ps - cd examples/${{ parameters.OPENEDX_RELEASE }}/minimal - ../../../bundle/dist/ddc-project config - displayName: "Test pyinstaller created binary" - - - task: PublishBuildArtifacts@1 - inputs: - pathToPublish: ./bundle/dist/ - artifactName: LinuxBinary - - - ${{ if eq(parameters.OS, 'macos') }}: - - script: | - pip install pyinstaller scrypt - # The Openssl version on MacOS 10.14 does not support scrypt - # so we pip install it and leave a trace to pyinstaller to pick it up - echo -e "\nimport scrypt" >> bundle/executable.py - make executable - displayName: "Build binary with pyinstaller" - - # Currently Azure Pipelines doesn't support Docker on MacOS - # so we can't really test the binary - - script: | - #set -ex - ./bundle/dist/derex --help - ./bundle/dist/ddc-services --help - cd examples/${{ parameters.OPENEDX_RELEASE }}/minimal - # ../../../bundle/dist/ddc-project config - true - displayName: "Test pyinstaller created binary" - - - task: PublishBuildArtifacts@1 - inputs: - pathToPublish: ./bundle/dist/ - artifactName: MacOSBinary