From 95bedd48a90c61e7be0069e8fd48e22d7b4f9032 Mon Sep 17 00:00:00 2001 From: Garrett Rabian Date: Mon, 18 Nov 2024 09:05:10 -0500 Subject: [PATCH] add flows for creating and removing reports --- .github/workflows/delete-pages.yml | 50 +++++++++++++ .github/workflows/deploy.yml | 50 +++++++++++++ rm_old_folders.py | 113 +++++++++++++++++++++++++++++ 3 files changed, 213 insertions(+) create mode 100755 .github/workflows/delete-pages.yml create mode 100755 rm_old_folders.py diff --git a/.github/workflows/delete-pages.yml b/.github/workflows/delete-pages.yml new file mode 100755 index 000000000..30aa63d83 --- /dev/null +++ b/.github/workflows/delete-pages.yml @@ -0,0 +1,50 @@ +name: Delete old folders from GitHub Pages + +on: + push: + branches: + - "gh-pages" + schedule: + - cron: '0 0 * * *' # This will run the workflow daily at midnight UTC + +jobs: + delete_old_folders: + runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: gh-pages + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Get current directory + run: echo "CURRENT_DIR=$(pwd)" >> $GITHUB_ENV + + - name: Run the script + run: python rm_old_folders.py --n-days 30 --folder-name "${{ env.CURRENT_DIR }}" + + - name: Commit all changed files back to the repository + uses: stefanzweifel/git-auto-commit-action@v5 + with: + branch: gh-pages + commit_message: Delete folders older than 30 days + + notify_on_delete_pages_failure: + runs-on: ubuntu-latest + needs: + - delete_old_folders + if: failure() + steps: + - name: Slack Notification + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_TITLE: ":boom: The nightly delete of expired Playwright reports job has failed in ${{ github.repository }}." 
+ MSG_MINIMAL: true + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 44c9f6eda..c954de09a 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -326,6 +326,55 @@ jobs: path: playwright-report # path on runner retention-days: 30 + upload-reports: + name: Upload Reports + needs: + - test + if: ${{ always() && github.ref_name != 'production' }} + runs-on: ubuntu-latest + outputs: + timestamp: ${{ steps.timestampid.outputs.timestamp }} + steps: + # create a unique folder name to put playwright reports in + - name: Set a Timestamp + id: timestampid + run: echo "timestamp=$(date --utc +%Y%m%d_%H%M%SZ)" >> "$GITHUB_OUTPUT" + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version-file: ".nvmrc" + - name: Install dependencies + run: yarn install + # downloads artifact created from the test job + - name: Download reports from GitHub Actions Artifacts + uses: actions/download-artifact@v4 + with: + name: playwright-html-report # download from previous job + path: downloaded-html-report # save as this when downloaded + - name: Push files to github pages + uses: peaceiris/actions-gh-pages@v4 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./downloaded-html-report # publish downloaded dir to github pages + destination_dir: ${{ steps.timestampid.outputs.timestamp }} + # need to extract just org name for reassembling the github pages URL + - name: Extract Organization Name + id: extract-org + run: | + echo "ORG_NAME=$(echo $GITHUB_REPOSITORY | cut -d'/' -f1)" >> $GITHUB_ENV + echo "org name: ${ORG_NAME}" + # need to extract just the repo name for reassembling the github pages URL + - name: Extract Repository Name + id: extract-repo + run: | + echo "REPO_NAME=$(echo $GITHUB_REPOSITORY | cut -d'/' -f2)" >> $GITHUB_ENV + echo "repo name: ${REPO_NAME}" + # assembles org name, repo name, and unique timestamp to link to github pages url 
that was published + - name: Write URL in Summary + run: | + echo "## Playwright Test Results" >> $GITHUB_STEP_SUMMARY + echo "https://${ORG_NAME}.github.io/${REPO_NAME}/${{ steps.timestampid.outputs.timestamp }}/" >> $GITHUB_STEP_SUMMARY + cleanup: name: Delist GHA Runner CIDR Blocks if: ${{ github.ref_name != 'main' && github.ref_name != 'val' && github.ref_name != 'production' }} @@ -334,6 +383,7 @@ jobs: - register-runner - a11y-tests - e2e-test + - test env: SLS_DEPRECATION_DISABLE: "*" # Turn off deprecation warnings in the pipeline steps: diff --git a/rm_old_folders.py b/rm_old_folders.py new file mode 100755 index 000000000..75f78807c --- /dev/null +++ b/rm_old_folders.py @@ -0,0 +1,113 @@ +import argparse +import os +import re +from datetime import datetime, timedelta +import shutil + +def find_old_folders(n_days, directory): + """ + Find folders in the specified directory that are older than n_days. + + Args: + directory (str): The directory to search for folders. + n_days (int): The number of days to determine which folders to delete. + + Returns: + list: List of folder names older than n_days. + """ + current_time = datetime.utcnow() + folder_name_regex = re.compile(r'^\d{8}_\d{6}Z$') + + old_folders = [] + for entry in os.scandir(directory): + if entry.is_dir() and re.match(folder_name_regex, entry.name): + try: + folder_date = datetime.strptime(entry.name, "%Y%m%d_%H%M%SZ") + time_difference = current_time - folder_date + if time_difference > timedelta(days=n_days): + old_folders.append(entry.name) + else: + print( + f"SKIPPED --- Folder '{entry.name}' is not older than " + f"{n_days} days. It will not be deleted." + ) + except ValueError: + print( + f"SKIPPED --- Error parsing timestamp for folder '{entry.name}'. " + f"It will not be deleted." + ) + else: + print( + f"SKIPPED --- Found folder/file with name '{entry.name}' that does " + f"not match the expected timestamp format. It will not be deleted." 
+ ) + + return old_folders + +def is_valid_directory(base_directory, folder_path): + """ + Check if the folder_path is a valid directory within the base_directory. + + Args: + base_directory (str): The base directory. + folder_path (str): The path of the folder to validate. + + Returns: + bool: True if the folder_path is valid, False otherwise. + """ + # Resolve absolute paths + base_directory = os.path.abspath(base_directory) + folder_path = os.path.abspath(folder_path) + + # Ensure that the folder_path starts with the base_directory + return folder_path.startswith(base_directory) + +def delete_folders(base_directory, folder_names): + """ + Delete specified folders and their contents in the given directory. + + Args: + base_directory (str): The base directory containing the folders to delete. + folder_names (list): List of folder names to delete. + """ + for folder_name in folder_names: + folder_path = os.path.join(base_directory, folder_name) + if is_valid_directory(base_directory, folder_path): + try: + shutil.rmtree(folder_path) + print( + f"DELETED --- Folder '{folder_name}' and its contents have " + f"been deleted." + ) + except FileNotFoundError: + print(f"Folder '{folder_name}' not found.") + except Exception as e: + print(f"Error deleting folder '{folder_name}': {e}") + else: + print(f"SKIPPED --- Invalid folder path: '{folder_path}'") + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Delete old folders in a specified directory." + ) + parser.add_argument( + "--n-days", + type=int, + required=True, + help="Number of days (days older than current date) to determine " + "which folders to delete." + ) + parser.add_argument( + "--folder-name", + type=str, + required=True, + help="Full path to the directory where reports are located." 
+ ) + args = parser.parse_args() + + # Ensure the provided folder name is an absolute path + if not os.path.isabs(args.folder_name): + raise ValueError("The folder name must be an absolute path.") + + old_folders = find_old_folders(args.n_days, args.folder_name) + delete_folders(args.folder_name, old_folders)