diff --git a/.dockerignore b/.dockerignore index 46385a06..2b51c8d5 100644 --- a/.dockerignore +++ b/.dockerignore @@ -13,5 +13,4 @@ node_modules src/*.egg-info src/aurora/staticfiles/ Makefile -README.md manage.py diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..34e85280 --- /dev/null +++ b/.env.example @@ -0,0 +1,11 @@ +SECRET_KEY=secret-key + +DATABASE_URL=psql://postgres:postgres@db:5432/postgres +CACHE_DEFAULT=redis://redis:6379/0 +STATIC_URL=/static/ +STATIC_ROOT=/var/static/ + +POSTGRES_DB=postgres +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=db diff --git a/.flake8 b/.flake8 index 918e4d4d..ef07025b 100644 --- a/.flake8 +++ b/.flake8 @@ -3,9 +3,20 @@ max-complexity = 20 max-line-length = 120 exclude = ~* -ignore = E401,W391,E128,E261,E731,Q000,W504,W606,W503 -putty-ignore = + .venv, + venv, + .git, + __pycache__, + build, + dist, + migrations, + snapshots, + __pypackages__, + +ignore = E401,W391,E128,E261,E731,Q000,W504,W606,W503,E203 +;putty-ignore = ; tests/test_choice_as_instance.py : E501 per-file-ignores = */__init__.py:F401,F403 + */migrations/*:E501 diff --git a/.github/actions/last_commit/action.yml b/.github/actions/last_commit/action.yml new file mode 100644 index 00000000..e42013e2 --- /dev/null +++ b/.github/actions/last_commit/action.yml @@ -0,0 +1,31 @@ +name: 'Get Last commit' +description: '' + + +outputs: + last_commit_sha: + description: 'last_commit_sha' + value: ${{ steps.result.outputs.last_commit_sha }} + last_commit_short_sha: + description: 'last_commit_short_sha' + value: ${{ steps.result.outputs.last_commit_short_sha }} + +runs: + using: "composite" + steps: + - name: Setup Environment (PR) + if: ${{ github.event_name == 'pull_request' }} + shell: bash + run: | + echo "LAST_COMMIT_SHA=${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV + - name: Setup Environment (Push) + if: ${{ github.event_name == 'push' }} + shell: bash + run: | + echo "LAST_COMMIT_SHA=${GITHUB_SHA}" >> $GITHUB_ENV + - id: result + shell: bash + run: | + raw=${{env.LAST_COMMIT_SHA}} + echo "last_commit_sha=$raw" >> $GITHUB_OUTPUT + echo "last_commit_short_sha=${raw::8}" >> $GITHUB_OUTPUT diff --git a/.github/file-filters.yml b/.github/file-filters.yml new file mode 100644 index 00000000..b1dc9cd5 --- /dev/null +++ b/.github/file-filters.yml @@ -0,0 +1,45 @@ +# This is used by the action https://github.com/dorny/paths-filter +docker: &docker + - added|modified: './docker/**/*' + - added|modified: './docker/*' + +dependencies: &dependencies + - 'pdm.lock' + - 'pyproject.toml' + +actions: &actions + - added|modified: './.github/**/*' + +python: &python + - added|modified: 'src/**' + - added|modified: 'tests/**' + - 'manage.py' + +changelog: + - added|modified: 'changes/**' + - 'CHANGELOG.md' + +mypy: + - *python + - 'mypy.ini' + +run_tests: + - *actions + - *python + - *docker + - *dependencies + - 'pytest.ini' + +migrations: + - added|modified: 'src/**/migrations/*' + +lint: + - *python + - '.flake8' + - 'pyproject.toml' + +docs: + - added|modified: './docs/**/*' + - modified: './src/aurora/config/__init__.py' + - modified: './github/workflows/docs.yml' + - modified: './github/file-filters.yml' diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml deleted file mode 100644 index d112c183..00000000 --- a/.github/workflows/docker.yml +++ /dev/null @@ -1,23 +0,0 @@ -on: - push: - branches: ['release'] - -env: - REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository }} - -jobs: - build-base: - runs-on: ubuntu-latest - 
permissions: - contents: read - packages: write - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Build and push Docker image - run: > - echo `pwd`/docker -# docker build . --file Dockerfile --tag my-image-name:$(date +%s) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..99af2932 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,86 @@ +name: "Documentation" + +on: + push: + branches: + - develop + - master + schedule: + - cron: '37 23 * * 2' + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + changes: + runs-on: ubuntu-latest + timeout-minutes: 1 + defaults: + run: + shell: bash + outputs: + docs: ${{ steps.changed_files.outputs.docs }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + - id: changed_files + name: Check for file changes + uses: dorny/paths-filter@v3 + with: + base: ${{ github.ref }} + token: ${{ github.token }} + filters: .github/file-filters.yml + generate: + name: Generate + if: needs.changes.outputs.docs == 'true' + needs: changes + runs-on: ubuntu-latest + env: + PYTHONPATH: src/ + steps: + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + - uses: actions/checkout@v4 + - uses: yezz123/setup-uv@v4 + + - uses: actions/cache/restore@v4 + id: restore-cache + with: + path: .venv + key: ${{ runner.os }}-${{ hashFiles('**/uv.lock') }} + + - name: Install dependencies + run: uv sync --extra docs + + - name: Build Doc + run: .venv/bin/mkdocs build -d ./docs-output + + - uses: actions/cache/save@v4 + id: cache + if: always() && steps.restore-cache.outputs.cache-hit != 'true' + with: + path: .venv + key: ${{ runner.os }}-${{ hashFiles('**/uv.lock') }} + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: ./docs-output + + # Deployment job + deploy: + needs: generate + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..75318524 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,56 @@ +name: Lint +on: + push: + branches: + - '**' +# pull_request: +# branches: [develop, master] +# types: [synchronize, opened, reopened, ready_for_review] + +defaults: + run: + shell: bash + + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + + +permissions: + contents: read + +jobs: + changes: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name + name: check files + runs-on: ubuntu-latest + timeout-minutes: 3 + outputs: + lint: ${{ steps.changes.outputs.lint }} + docker: ${{ steps.changes.outputs.docker_base }} + steps: + - run: git config --global --add safe.directory $(realpath .) 
+ - uses: actions/checkout@v4 + - id: changes + name: Check for backend file changes + uses: dorny/paths-filter@v3 + with: + base: ${{ github.ref }} + token: ${{ github.token }} + filters: .github/file-filters.yml + + ruff: + needs: changes + runs-on: ubuntu-latest + if: github.event.pull_request.draft == false && needs.changes.outputs.lint + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + - name: Install requirements + run: pip install ruff + - name: Check syntax + # Stop the build if there are Python syntax errors or undefined names + run: ruff check -e diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 00000000..4c000742 --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,79 @@ +name: Security +on: + push: + branches: + - develop + - master + - staging + - release/* + - feature/* + - bugfix/* + - hotfix/* +# pull_request: +# branches: [develop, master] +# types: [synchronize, opened, reopened, ready_for_review] + +defaults: + run: + shell: bash + + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + + +permissions: + contents: read + +jobs: + changes: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name + name: check files + runs-on: ubuntu-latest + timeout-minutes: 3 + env: + GIT_DISCOVERY_ACROSS_FILESYSTEM: 1 + outputs: + lint: ${{ steps.changes.outputs.lint }} + docker: ${{ steps.changes.outputs.docker_base }} + steps: + - run: git config --global --add safe.directory $(realpath .) + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + - id: changes + name: Check for backend file changes + uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0 + with: + base: ${{ github.ref }} + token: ${{ github.token }} + filters: .github/file-filters.yml + + bandit: + needs: changes + runs-on: ubuntu-latest + if: github.event.pull_request.draft == false && needs.changes.outputs.lint + permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results + actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status + steps: + - uses: actions/checkout@v4 + - name: Bandit Scan + uses: shundor/python-bandit-scan@9cc5aa4a006482b8a7f91134412df6772dbda22c + with: # optional arguments + # exit with 0, even with results found + exit_zero: true # optional, default is DEFAULT + # Github token of the repository (automatically created by Github) + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information. + # File or directory to run bandit on + path: src # optional, default is . + # Report only issues of a given severity level or higher. Can be LOW, MEDIUM or HIGH. Default is UNDEFINED (everything) + # level: # optional, default is UNDEFINED + # Report only issues of a given confidence level or higher. Can be LOW, MEDIUM or HIGH. 
Default is UNDEFINED (everything) + # confidence: # optional, default is UNDEFINED + # comma-separated list of paths (glob patterns supported) to exclude from scan (note that these are in addition to the excluded paths provided in the config file) (default: .svn,CVS,.bzr,.hg,.git,__pycache__,.tox,.eggs,*.egg) + # excluded_paths: # optional, default is DEFAULT + # comma-separated list of test IDs to skip + # skips: # optional, default is DEFAULT + # path to a .bandit file that supplies command line arguments + # ini_path: # optional, default is DEFAULT diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8a9369b9..5b1c02e2 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -3,70 +3,182 @@ name: Test on: push: branches: - - master - develop - pull_request: + - master + - staging + - release/* + - feature/* + - bugfix/* + - hotfix/* +# pull_request: +# branches: [ develop, master ] +# types: [ synchronize, opened, reopened, ready_for_review ] + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +defaults: + run: + shell: bash + +permissions: + id-token: write + attestations: write + jobs: -# lint: +# changes: +# if: (github.event_name != 'pull_request' +# || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) +# || github.event_name == 'create' # runs-on: ubuntu-latest +# timeout-minutes: 1 +# defaults: +# run: +# shell: bash +# outputs: +# run_tests: ${{ steps.changes.outputs.run_tests }} # steps: -# - uses: actions/checkout@v2 -# - uses: actions/setup-python@v2 -# -# - name: Install dependencies -# run: | -# python -m pip install --upgrade pip pre-commit -# - name: -# run: pre-commit run --all-files - +# - name: Checkout code +# uses: actions/checkout@v4.1.7 +# - id: changes +# name: Check for file changes +# uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0 +# with: +# base: ${{ github.ref }} +# token: ${{ github.token }} +# filters: .github/file-filters.yml test: +# needs: [ changes ] +# if: needs.changes.outputs.run_tests == 'true' runs-on: ubuntu-latest + outputs: + image: ${{ env.IMAGE }} + commit: ${{env.sha_short}} + build_date: ${{env.BUILD_DATE}} + branch: ${{env.BRANCH}} services: - postgres: - image: postgres:12 + redis: + image: redis + ports: + - 16379:6379 + db: + image: postgres:14 env: - POSTGRES_USER: postgres + POSTGRES_DATABASE: aurora POSTGRES_PASSWORD: postgres - POSTGRES_DB: aurora + POSTGRES_USERNAME: postgres ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - strategy: - fail-fast: false - matrix: - django-version: [ "3.2", ] - python-version: [ "3.9", ] - experimental: [ false ] - include: - - django-version: "4.2" - python-version: "3.9" - experimental: true - continue-on-error: ${{ matrix.experimental }} + - 15432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 env: - DATABASE_URL: postgres://postgres:postgres@127.0.0.1:5432/aurora - STATIC_URL: /static/ - STATIC_ROOT: - PY_VER: ${{ matrix.python-version}} - DJ_VER: ${{ matrix.django-version}} - SECRET_KEY: super-secret-key + DOCKER_DEFAULT_PLATFORM: linux/amd64 + DOCKER_METADATA_ANNOTATIONS_LEVELS: manifest,index + DATABASE_URL: postgres://postgres:postgres@localhost:15432/aurora + CELERY_BROKER_URL: redis://localhost:16379/0" + CACHE_URL: redis://localhost:16379/0 + DOCKER_BUILDKIT: 1 steps: - - uses: actions/checkout@v2 - - 
- name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + - name: Checkout code + uses: actions/checkout@v4 + - name: Hack container for local development + if: ${{ env.ACT }} + run: | + echo /home/runner/externals/node20/bin >> $GITHUB_PATH + - name: Hack container for local development + run: | + echo BRANCH="${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_ENV + - name: Docker meta + id: meta + uses: docker/metadata-action@v5.5.1 + with: + images: "unicef/hope-aurora" + tags: | + type=ref,event=branch + type=ref,event=pr + type=ref,event=tag + type=semver,pattern={{version}} + type=semver,pattern={{raw}} + - name: DockerHub login + uses: docker/login-action@v3 with: - python-version: ${{ matrix.python-version }} + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - id: last_commit + uses: ./.github/actions/last_commit + - id: calc + shell: bash + run: | + set -x + IMAGE=$(echo '${{env.DOCKER_METADATA_OUTPUT_JSON}}' | jq '.tags[0]') + echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + echo "BUILD_DATE=$(date +"%Y-%m-%d %H:%M" )" >> $GITHUB_ENV + echo "IMAGE=$IMAGE" >> $GITHUB_ENV - - name: Install poetry - run: python -m pip install --upgrade poetry + - name: Build Test Image + run: | + docker build \ + --target tests \ + -t ${{env.IMAGE}} \ + --cache-from "type=gha" \ + --cache-to "type=gha,mode=max" \ + -f docker/Dockerfile . - - name: Install dependencies - run: poetry install + - name: Run Test suite + run: | + mkdir output + docker run --rm \ + --network host \ + -e PYTHONPATH=/app/src \ + -e DATABASE_URL=${DATABASE_URL} \ + -e CELERY_BROKER_URL=${CELERY_BROKER_URL} \ + -e CACHE_URL=${CACHE_URL} \ + -e SECRET_KEY=super-secret-key-just-for-testing \ + -v "./output/:/app/output" \ + -v "./src/:/app/src" \ + -v "./tests:/app/tests" \ + -v "./pytest.ini:/app/pytest.ini" \ + -t ${{env.IMAGE}} \ + pytest tests/ -n auto -v --maxfail=5 --migrations --cov-report xml:./output/coverage.xml - - name: Run tests - run: poetry run pytest tests/ --create-db + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + continue-on-error: true + with: + env_vars: OS,PYTHON + fail_ci_if_error: true + files: /app/output/coverage.xml + token: ${{ secrets.CODECOV_TOKEN }} + verbose: false + name: codecov-${{env.GITHUB_REF_NAME}} - - uses: codecov/codecov-action@v1 + release: + needs: [ test ] + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: DockerHub login + uses: docker/login-action@v3 with: - verbose: false # optional (default = false) + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build Distro + run: | + docker build \ + --target dist \ + --cache-from "type=gha" \ + --cache-to "type=gha,mode=max" \ + --build-arg "VERSION=${{needs.test.outputs.commit}}" \ + --build-arg "BUILD_DATE=${{needs.test.outputs.build_date}}" \ + --build-arg "BRANCH=${{needs.test.outputs.branch}}" \ + -t ${{needs.test.outputs.image}} \ + -f docker/Dockerfile . 
+ docker push ${{needs.test.outputs.image}} + docker inspect ${{needs.test.outputs.image}} | jq -r '.[0].Config.Labels' + echo "::notice::✅ Image ${{needs.test.outputs.image}} built and pushed" diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml deleted file mode 100644 index 6576315c..00000000 --- a/.github/workflows/trivy.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Trivy - -on: - push: - branches: - - develop - pull_request: - -jobs: - trivy: - runs-on: ubuntu-latest - strategy: - fail-fast: false - env: - STATIC_URL: /static/ - STATIC_ROOT: - steps: - - name: Install Trivy - run: | - curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v0.28.0 - - - uses: actions/checkout@v2 - - - name: Install poetry - run: python -m pip install --upgrade poetry - - - name: Install dependencies - run: poetry install - - - name: Scan - run: | - trivy fs \ - --exit-code 0 \ - src/ diff --git a/.gitignore b/.gitignore index 9edeeb50..929ad1cd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .* +!.git ~* *.min.min.js *.py[ico] @@ -17,7 +18,12 @@ package.json !.isort.cfg !.flake8 !.pre-commit-config.yaml +!.secrets.baseline .env +.venv +__packages__ docker/conf/nginx.conf docker/conf/redis.conf src/aurora/staticfiles + +.pdm-python diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3eadb537..b0ee70b3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,57 +1,30 @@ -exclude: '^$' -fail_fast: false repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v5.0.0 hooks: - # - id: double-quote-string-fixer - # stages: [commit] - - - id: debug-statements - stages: [commit] - - id: end-of-file-fixer - exclude: .bumpversion.cfg - stages: [commit] - - - id: check-merge-conflict - stages: [commit] - - - id: check-case-conflict - stages: [commit] - - - repo: https://github.com/saxix/pch - rev: fc387f44c7c1a51b762a5866ce9d954900e5af68 + - id: trailing-whitespace + - repo: https://github.com/adamchainz/djade-pre-commit + rev: 1.3.2 hooks: - - id: check-missed-migrations + - id: djade args: - - src - stages: [commit] - - - id: check-untracked - args: - - src - - tests - stages: [push] - - - id: check-forbidden + - --target-version + - '5.1' + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.4 + hooks: + - id: ruff + - id: ruff-format args: - - -p - - /\.showbrowser\(/ - - -p - - /print\(111/ - stages: [commit] - -# - repo: https://github.com/psf/black -# rev: 22.1.0 + - --check +# - repo: https://github.com/saxix/pch +# rev: v0.1 # hooks: -# - id: black -# stages: [ commit ] - - - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 - hooks: - - id: flake8 - additional_dependencies: - - flake8-black>=0.1.1 - language_version: python3 +# - id: check-missed-migrations +# args: +# - src +# stages: +# - pre-commit +# additional_dependencies: +# - setuptools diff --git a/.secrets.baseline b/.secrets.baseline new file mode 100644 index 00000000..e69de29b diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..94621be2 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ +Copyright (c) 2014 - 2024 UNICEF. All rights reserved. + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as +published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. 
+ +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Affero General Public License below for more details. + +------------------------------------------------------------------------ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". 
"Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. 
+ + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. 
This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. 
Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. 
+Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). 
+ + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". 
+ + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. 
+ + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. 
diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..d64133c0 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,21 @@ +exclude * + +include README.md +include MANIFEST.in +include LICENSE.md +include pyproject.toml +include *.py +include uv.lock + +recursive-include src/aurora * +recursive-include src/aurora *.html +recursive-include src/dbtemplates * +recursive-include src/dbtemplates *.html + +recursive-exclude tests * + +prune **/~* +global-exclude .* +global-exclude *~ +global-exclude ~* +exclude manage.py diff --git a/README.md b/README.md index 5c6a1ed0..defe2119 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,15 @@ # Aurora -Aurora is an open source project to collect and register data. -It is focused mainly on performance and security. +[![Test](https://github.com/unicef/hope-aurora/actions/workflows/test.yml/badge.svg)](https://github.com/unicef/hope-aurora/actions/workflows/test.yml) +[![Lint](https://github.com/unicef/hope-aurora/actions/workflows/lint.yml/badge.svg)](https://github.com/unicef/hope-aurora/actions/workflows/lint.yml) +[![codecov](https://codecov.io/github/unicef/hope-aurora/graph/badge.svg?token=FBUB7HML5S)](https://codecov.io/github/unicef/hope-aurora) +[![Documentation](https://github.com/unicef/hope-aurora/actions/workflows/docs.yml/badge.svg)](https://unicef.github.io/hope-aurora/) +[![Docker Pulls](https://img.shields.io/docker/pulls/unicef/hope-aurora)](https://hub.docker.com/repository/docker/unicef/hope-aurora/tags) -### Run the code -- Option 1: with local machine services (redis, postgres) with `direnv` +Aurora is the official HOPE online registration tool. It is released as an open source project; feel free to contribute and use it. -First configure your `.envrc` and run +It has been developed with performance and security as its main priorities. -```shell - python manage.py runserver -```` - -- Option 2: using docker-composer - -For the first time you need to run in root project directory - -```shell -./manage env --comment --defaults > .env -docker-compose build -docker-compose up -``` - -each next time - -```shell -docker-compose up -``` +Please read more about Aurora and HOPE in the [official documentation](https://unicef.github.io/hope-documentation/) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ea24d44d..5f862270 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -72,7 +72,7 @@ stages: repository: $(Docker.repository) buildContext: "**/.."
# arguments: '--build-arg VERSION=$(Build.SourceVersion) --build-arg BUILD_DATE="$(buildDate)" --cache-from=$(Docker.registry).azurecr.io/$(Docker.repository):latest' - arguments: '--build-arg VERSION="$(commitHash)" --build-arg BUILD_DATE="$(buildDate)" --cache-from=$(Docker.registry).azurecr.io/$(Docker.repository):latest' + arguments: '--build-arg GIT_HASH="$(commitHash)" --build-arg BUILD_DATE="$(buildDate)" --cache-from=$(Docker.registry).azurecr.io/$(Docker.repository):latest' tags: | $(Build.BuildId) $(Build.SourceVersion) @@ -109,18 +109,20 @@ stages: -CONSTANCE_DATABASE_CACHE_BACKEND $(CONSTANCE_DATABASE_CACHE_BACKEND) -ALLOWED_HOSTS $(ALLOWED_HOSTS) -SECRET_KEY $(SECRET_KEY) - -SENTRY_DSN $(SENTRY_DSN) - -ADMIN_EMAIL $(ADMIN_EMAIL) - -ADMIN_PASSWORD $(ADMIN_PASSWORD) - -ROOT_TOKEN $(ROOT_TOKEN) -DEBUG $(DEBUG) -BUILD_VERSION $(Build.SourceVersion) -LANGUAGE_CODE $(LANGUAGE_CODE) -USE_X_FORWARDED_HOST $(USE_X_FORWARDED_HOST) - -LANGUAGE_CODE $(LANGUAGE_CODE) + -ROOT_TOKEN $(ROOT_TOKEN) + -ADMIN_EMAIL $(ADMIN_EMAIL) + -ADMIN_PASSWORD $(ADMIN_PASSWORD) + -EMAIL_HOST_USER $(EMAIL_HOST_USER) + -MAILJET_API_KEY $(MAILJET_API_KEY) + -MAILJET_SECRET_KEY $(MAILJET_SECRET_KEY) -AZURE_CLIENT_ID $(KEY_VALUT_AZURE_CLIENT_ID) -AZURE_CLIENT_KEY $(KEY_VALUT_AZURE_CLIENT_ID) -AZURE_CLIENT_SECRET $(KEY_VALUT_AZURE_CLIENT_SECRET) -AZURE_TENANT_ID $(KEY_VALUT_AZURE_TENANT_KEY) - -MATOMO_ID $(MATOMO_ID) - -SENTRY_ENVIRONMENT $(SENTRY_ENVIRONMENT)" + -SENTRY_DSN $(SENTRY_DSN) + -SENTRY_ENVIRONMENT $(SENTRY_ENVIRONMENT) + -MATOMO_ID $(MATOMO_ID)" diff --git a/compose.yml b/compose.yml new file mode 100644 index 00000000..220a7950 --- /dev/null +++ b/compose.yml @@ -0,0 +1,58 @@ +volumes: + db: + +services: + db: + image: postgres:16 + environment: + - PGUSER=aurora + - POSTGRES_USER=aurora + - POSTGRES_PASSWORD=password + - POSTGRES_DB=aurora + + redis: + image: redis:7 + restart: unless-stopped + + app: + image: unicef/hope-aurora:2.0-dist + stdin_open: true + tty: true + environment: + - ADMIN_EMAIL=admin@example.com + - ADMIN_PASSWORD=password + - ALLOWED_HOSTS=app,localhost,127.0.0.1 + - CACHE_URL=redis://redis:6379/1?client_class=django_redis.client.DefaultClient + - CACHE_DEFAULT=redis://redis:6379/2 + - CELERY_BROKER_URL=redis://redis:6379/9 + - CSRF_COOKIE_SECURE=False + - CSRF_TRUSTED_ORIGINS=http://localhost:8000,http://localhost + - DATABASE_URL=postgres://aurora:password@db:5432/aurora + - DEFAULT_ORGANIZATION=UNICEF + - DEBUG=true + - DJANGO_ADMIN_URL=admin/ + - FERNET_KEY=3bfbbad7d5e149e9b313fd47d33db5e6 + - MEDIA_ROOT=/var/storage/media/ + - SECRET_KEY=super_secret_key_just_for_development_that_needs_to_be_more_than_fifty_characters + - SECURE_HSTS_PRELOAD=0 + - SECURE_SSL_REDIRECT=False + - SESSION_COOKIE_DOMAIN=localhost:8000 + - SESSION_COOKIE_SECURE=False + - SOCIAL_AUTH_REDIRECT_IS_HTTPS=False + - STORAGE_STATIC=django.core.files.storage.FileSystemStorage + - STORAGE_MEDIA=django.core.files.storage.FileSystemStorage + - STORAGE_DEFAULT=django.core.files.storage.FileSystemStorage + - STATIC_ROOT=/var/storage/static/ + - STATIC_URL=/static/ + - USE_HTTPS=false + - USE_X_FORWARDED_HOST=false +# build: +# context: ./ +# dockerfile: ./docker/Dockerfile +# target: dev + ports: + - "8000:8000" # expose nginx here +# - "8000:8000" + depends_on: + - db + - redis diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 671bca04..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: '3.7' - -volumes: - db: - -services: - backend: - stdin_open: 
true - tty: true - env_file: - - .env - build: - context: ./ - dockerfile: ./docker/Dockerfile - ports: - - "8000:8000" - volumes: - - ./:/code/ - command: "dev" - depends_on: - - db - db: - image: mdillon/postgis:11-alpine - volumes: - - db:/var/lib/postgresql/data - env_file: - - .env - redis: - image: redis:4.0.11-alpine3.8 - restart: unless-stopped - expose: - - "6379" diff --git a/docker/Dockerfile b/docker/Dockerfile index f7a7e436..f10bae99 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,70 +1,151 @@ -FROM python:3.9.16-slim-bullseye +FROM python:3.12-slim-bookworm AS base ARG BUILD_DATE ARG VERSION - -WORKDIR /code/ +ARG GIT_SHA RUN apt-get update \ && apt-get install -y --no-install-recommends \ - build-essential \ - locales \ - nginx \ - locales-all \ + gdal-bin \ gettext \ - libjpeg-dev \ - zlib1g-dev \ git-core \ + locales \ + locales-all \ + nginx \ postgresql-client \ - libpq-dev \ - python3-psycopg2 \ + postgresql-client-15 \ python3-gdal \ - redis-server \ - gdal-bin \ - curl \ + python3-psycopg2 \ && locale-gen --no-purge uk_UA.UTF-8 \ && apt-get clean -ENV PATH="${PATH}:/root/.local/bin" \ - ADMINS="" \ - BUILD_DATE=${BUILD_DATE} \ - CACHE_DEFAULT="redis://127.0.0.1/0" \ - CONSTANCE_DATABASE_CACHE_BACKEND="" \ - DATABASE_URL="" \ - DEFAULT_ORGANIZATION="UNICEF"\ - DJANGO_SETTINGS_MODULE="aurora.config.settings" \ - IPSTACK_KEY="" \ - LOG_LEVEL="ERROR" \ - MAINTENANCE_MODE="off" \ - NGINX_CACHE_DIR="/data/nginx/cache" \ - NGINX_MAX_BODY_SIZE=30M \ - REDIS_LOGLEVEL="warning" \ - REDIS_MAXMEMORY="100Mb" \ - REDIS_MAXMEMORY_POLICY="volatile-ttl" \ - PYTHONPATH="/code/src/" \ + +FROM base AS builder +RUN set -x \ + && buildDeps="build-essential \ +cmake \ +curl \ +gcc \ +git \ +libfontconfig1 \ +libgconf-2-4 \ +libglib2.0-0 \ +libnss3 \ +libssl-dev \ +libjpeg-dev \ +libpq-dev \ +libxml2-dev \ +python3-dev \ +zlib1g-dev \ +" \ + && apt-get update \ + && apt-get install -y --no-install-recommends $buildDeps \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* \ + && pip install uv uwsgi + + +# ------- tests ------- +FROM builder AS tests +ARG BUILD_DATE +ARG VERSION +ARG GIT_SHA + +LABEL distro="tests" +LABEL org.opencontainers.image.created="$BUILD_DATE" +LABEL org.opencontainers.image.version="$VERSION" + +ENV PATH=/venv/bin:/usr/local/bin/:/usr/bin:/bin \ + DJANGO_SETTINGS_MODULE=aurora.config.settings \ + PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=""\ + UV_PROJECT_ENVIRONMENT=/venv \ + VIRTUAL_ENV=/venv \ + UWSGI_PROCESSES=4 + +WORKDIR /app +ADD uv.lock README.md MANIFEST.in pyproject.toml /app/ +ADD src /app/src/ + +RUN --mount=type=cache,target=/root/.uv-cache \ + uv sync --cache-dir=/root/.uv-cache \ + --python=/usr/local/bin/python \ + --python-preference=system \ + --frozen + +# ------- production only deps------- +FROM builder AS production + +WORKDIR /app +COPY uv.lock README.md MANIFEST.in pyproject.toml /app/ +COPY src /app/src/ +ENV PATH=/venv/bin:/usr/local/bin/:/usr/bin:/bin \ + PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=""\ + UV_PROJECT_ENVIRONMENT=/venv \ + VIRTUAL_ENV=/venv + +RUN --mount=type=cache,target=/root/.uv-cache \ + uv sync --cache-dir=/root/.uv-cache \ + --python=/usr/local/bin/python \ + --python-preference=system \ + --no-dev --no-editable --frozen + + +FROM base AS dist +ARG BUILD_DATE +ARG VERSION +ARG GIT_SHA + +ENV PATH=/venv/bin:/usr/local/bin/:/usr/bin:/bin:/usr/sbin/:/usr/local/sbin/ \ + BUILD_DATE="${BUILD_DATE}" \ + DJANGO_SETTINGS_MODULE=aurora.config.settings \ + 
GIT_SHA="${GIT_SHA}" \ + MEDIA_ROOT=/var/aurora/media \ + PGSSLCERT="/tmp/postgresql.crt" \ + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=""\ PYTHONUNBUFFERED=1 \ - SECRET_KEY="secret-key-just-for-build" \ - SENTRY_DSN="" \ - STATIC_ROOT="/var/static" \ + STATIC_ROOT=/var/aurora/static/ \ STATIC_URL="/static/" \ UWSGI_PROCESSES=4 \ - VERSION=${VERSION} \ - POETRY_VERSION=${POETRY_VERSION} + UV_PROJECT_ENVIRONMENT=/venv \ + VIRTUAL_ENV=/venv \ + VERSION="${VERSION}" + -# STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage" \ +WORKDIR /app +ADD docker/conf /conf/ +ADD docker/bin/* /usr/local/bin/ -RUN pip install "poetry==1.2.2" +ADD uv.lock README.md MANIFEST.in pyproject.toml /app/ +ADD src /app/src/ -ADD poetry.lock pyproject.toml /code/ +COPY --from=production /venv /venv +COPY --from=builder /usr/local/bin/uwsgi /usr/local/bin/uv /usr/local/bin/ -RUN poetry config virtualenvs.create false \ - && mkdir -p /var/nginx/system \ - && poetry install --no-interaction --no-ansi --no-root --without dev \ - && pip install uwsgi +RUN --mount=type=cache,target=/root/.uv-cache \ + uv --cache-dir=/root/.uv-cache pip install --no-deps . \ + && rm -fr /app/* -ADD . /code/ -ADD ./docker/conf/nginx/* /var/nginx/ -ADD ./docker/conf/* /conf/ -ADD ./docker/bin/* /usr/local/bin/ -ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] EXPOSE 8000 +ENTRYPOINT exec entrypoint.sh "$0" "$@" +CMD ["run"] + +LABEL distro="final" +LABEL maintainer="hope@unicef.org" +LABEL cheksum="$CHECKSUM" + +LABEL org.opencontainers.image.authors="hope@unicef.org" +LABEL org.opencontainers.image.created="$BUILD_DATE" +LABEL org.opencontainers.image.description="Bitcaster" +LABEL org.opencontainers.image.documentation="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}" +LABEL org.opencontainers.image.licenses="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/blob/${GIT_SHA:-master}/LICENSE.md" +LABEL org.opencontainers.image.revision=$GIT_SHA +LABEL org.opencontainers.image.source="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/tree/${GIT_SHA:-master}/" +LABEL org.opencontainers.image.title="Aurora" +LABEL org.opencontainers.image.version="$VERSION" +LABEL org.opencontainers.image.url="https://register.unicef.org/" +LABEL org.opencontainers.image.vendor="UNICEF" diff --git a/docker/Dockerfile.test b/docker/Dockerfile.test index ea1ebc7b..e117c5ef 100644 --- a/docker/Dockerfile.test +++ b/docker/Dockerfile.test @@ -1,4 +1,4 @@ -FROM python:3.9.2-slim-buster +FROM python:3.12-slim-bookworm ARG BUILD_DATE ARG VERSION @@ -51,14 +51,10 @@ ENV PATH="${PATH}:/root/.local/bin" \ VERSION=${VERSION} \ POETRY_VERSION=${POETRY_VERSION} -RUN pip install "poetry==1.2.2" +RUN pip install pdm==2.9.3 -ADD poetry.lock pyproject.toml /code/ - -RUN poetry config virtualenvs.create false \ - && mkdir -p /var/nginx/system \ - && poetry install --no-interaction --no-ansi --no-root \ - && pip install uwsgi +ADD pdm.toml pdm.lock /code/ +RUN pdm sync --prod --no-editable --no-self ADD . 
/code/ ADD ./docker/conf/nginx/* /var/nginx/ diff --git a/docker/Makefile b/docker/Makefile index 4b875868..b02818e0 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -1,121 +1,93 @@ # envs to set before use this script .ONESHELL: - -DEVELOP?=0 -DOCKER_PASS?= -DOCKER_USER?= -VERSION?=0.1.0 -BASE?=$(shell echo "${VERSION}" | sed "s/\([0-9]*\)\.\([0-9]*\)\.\(.*\)/\1.\2/g" ) -PUSH_BASE?=0 -DEVELOP?=1 +VERSION := $(shell grep -m 1 version ../pyproject.toml | tr -s ' ' | tr -d '"' | tr -d "'" | cut -d' ' -f3) BUILD_DATE:="$(shell date +"%Y-%m-%d %H:%M")" -# below vars are used internally -POETRY_VERSION=1.2.2 -BUILD_OPTIONS?=--squash -CMD?=run -WORKER?='??' -CONTAINER_NAME?=flex-form-cnt -LAZO?=$(shell which lazo) - -DOCKER_REGISTRY=ghcr.io -DOCKER_IMAGE_NAME=saxix/aurora -DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:${VERSION} -DOCKER_TARGET=${DOCKER_REGISTRY}/${DOCKER_IMAGE} -DOCKERFILE?=Dockerfile - -RUN_OPTIONS?= -PIPENV_ARGS?= - -ISDIRTY=$(shell git status --porcelain) -IMAGE_EXISTS=$(shell docker ps -a | grep "${DOCKER_REGISTRY}/${DOCKER_IMAGE}") - -.PHONY: help runlocal -.DEFAULT_GOAL := help - -define PRINT_HELP_PYSCRIPT -import re, sys - -for line in sys.stdin: - match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) - if match: - target, help = match.groups() - print("%-20s %s" % (target, help)) -endef -export PRINT_HELP_PYSCRIPT - -BROWSER := python -c "$$BROWSER_PYSCRIPT" - -help: - @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) - -check: - @[ -z "`git status --porcelain`" ] || ( git status ; echo "Uncommited changes"; exit 1 ) - export IMAGE_EXISTS=`curl --silent --header "PRIVATE-TOKEN: ${GITLAB_API_TOKEN}" https://gitlab.com/api/v4/projects/22122245/registry/repositories/1473455/tags/${VERSION} | jq '.location'` - @[ "${IMAGE_EXISTS}" != null ] || ( echo "Docker image '${DOCKER_TARGET}' already exists"; exit 1 ) - @[ -n "`git tag -l ${VERSION}`" ] || git tag -m "v${VERSION}" ${VERSION} +SOURCE_COMMIT=$(shell git rev-parse --short HEAD) +STAGE?=dist +TAG?=${VERSION} .build: - cd .. && docker build \ + DOCKER_BUILDKIT=1 cd .. && docker build \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ --build-arg VERSION=${VERSION} \ --build-arg BUILD_DATE=${BUILD_DATE} \ - -t ${DOCKER_REGISTRY}/${DOCKER_IMAGE} \ - -f docker/${DOCKERFILE} . - docker tag ${DOCKER_REGISTRY}/${DOCKER_IMAGE} ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:latest + --build-arg GIT_SHA=${SOURCE_COMMIT} \ + --cache-from ${DOCKER_IMAGE_NAME}:${VERSION}-base \ + --cache-from ${DOCKER_IMAGE_NAME}:${VERSION}-builder \ + --cache-from ${DOCKER_IMAGE_NAME}:${VERSION}-production \ + --cache-to type=inline,mode=max \ + --target ${STAGE} \ + ${BUILD_OPTIONS} \ + -t ${DOCKER_IMAGE} \ + -f docker/Dockerfile . docker images | grep "${DOCKER_IMAGE_NAME}" | grep ${VERSION} + docker inspect ${DOCKER_IMAGE} | jq -r '.[0].Config.Labels' -build: check .build -dev: - cd .. && docker build -t flex-dev -f docker/Dockerfile.dev . 
- DOCKER_IMAGE=flex-dev \ - CMD='/bin/bash' \ - RUN_OPTIONS="-it -v ${PWD}/conf/:/conf/ -v ${PWD}/../:/code/" \ - $(MAKE) .run +build: + DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:${VERSION} \ + $(MAKE) .build ## build image + +.build-stage: + DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:${VERSION}-${STAGE} \ + $(MAKE) .build + +base: + STAGE=base $(MAKE) .build-stage + +builder: + STAGE=builder $(MAKE) .build-stage + + +tests: + STAGE=tests $(MAKE) .build-stage + +production: + STAGE=production $(MAKE) .build-stage -.push: - git push +dist: + STAGE=dist $(MAKE) .build-stage + + +step1: base +step2: builder +step3: tests +step4: production +step5: dist .run: - mkdir -p ../${VOLUME} cd .. && docker run \ --rm \ - --name=${CONTAINER_NAME} \ - -p 8000:80 \ - -e AUTHENTICATION_BACKENDS=\ + -p 8000:8000 \ -e ADMIN_EMAIL="${ADMIN_EMAIL}" \ -e ADMIN_PASSWORD="${ADMIN_PASSWORD}" \ - -e DJANGO_ADMIN_URL=admin/ \ - -e ADMINS="" \ - -e MAINTENANCE_MODE="off" \ - -e LANGUAGE_CODE="uk-ua" \ -e ALLOWED_HOSTS="*" \ + -e CACHE_DEFAULT="redis://192.168.66.66:6379/0" \ + -e CELERY_BROKER_URL=redis://192.168.66.66:6379/0 \ + -e CSRF_COOKIE_SECURE=False \ + -e CSRF_TRUSTED_ORIGINS=http://localhost \ -e DATABASE_URL="${DATABASE_URL}" \ - -e DEBUG="0" \ - -e DJANGO_SETTINGS_MODULE="aurora.config.settings" \ + -e DEBUG="1" \ + -e DJANGO_ADMIN_URL=admin/ \ + -e DJANGO_SETTINGS_MODULE=aurora.config.settings \ + -e LOGGING_LEVEL="DEBUG" \ + -e SECRET_KEY="super-secret-key-just-for-testing" \ -e SENTRY_DSN="${SENTRY_DSN}" \ - -e REDIS_CONNSTR="192.168.66.66" \ - -e VERSION="${VERSION}" \ - -v ${PWD}/conf/:/conf/ \ - -v ${PWD}/bin/entrypoint.sh:/usr/local/bin/entrypoint.sh \ - -v ${PWD}/conf/nginx:/var/nginx/ \ + -e SOCIAL_AUTH_REDIRECT_IS_HTTPS="False" \ + -e SUPERUSERS="sax," \ + -v ./docker/conf/:/conf \ ${RUN_OPTIONS} \ - ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:latest \ + -t ${DOCKER_IMAGE} \ ${CMD} -# -e STATICFILES_STORAGE="django.contrib.staticfiles.storage.ManifestStaticFilesStorage"\ -#docker push registry.gitlab.com/bitcaster/bitcaster - -release: .docker-login ## release version $VERSION - docker tag ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:${VERSION} ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:latest - docker push ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:latest - docker push ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:${VERSION} +run: + DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:${TAG} \ + CMD='run' \ + $(MAKE) .run -run: ## run image locally - CMD="" $(MAKE) .run -test: ## test docker image - CMD='sh -c "whoami && django-admin collectstatic --no-input -v 3 && django-admin check --deploy"' \ +shell: + DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:${TAG} \ + RUN_OPTIONS="-it" \ + CMD='/bin/bash' \ $(MAKE) .run - -shell: ## run docker and spawn a shell - DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:latest RUN_OPTIONS=-it CMD='/bin/bash' $(MAKE) .run diff --git a/docker/bin/entrypoint.sh b/docker/bin/entrypoint.sh index 889401f8..2025df7c 100755 --- a/docker/bin/entrypoint.sh +++ b/docker/bin/entrypoint.sh @@ -2,6 +2,9 @@ set -e export NGINX_MAX_BODY_SIZE="${NGINX_MAX_BODY_SIZE:-30M}" export NGINX_CACHE_DIR="${NGINX_CACHE_DIR:-/data/nginx/cache}" +export MEDIA_ROOT="${MEDIA_ROOT:-/var/media}" +export STATIC_ROOT="${STATIC_ROOT:-/var/static}" + export REDIS_LOGLEVEL="${REDIS_LOGLEVEL:-warning}" export REDIS_MAXMEMORY="${REDIS_MAXMEMORY:-100Mb}" export REDIS_MAXMEMORY_POLICY="${REDIS_MAXMEMORY_POLICY:-volatile-ttl}" @@ -11,30 +14,31 @@ export AURORA_BUILD=${BUILD_DATE} export DOLLAR='$' mkdir -p /var/run /var/nginx ${NGINX_CACHE_DIR} ${MEDIA_ROOT} ${STATIC_ROOT} -echo 
"created support dirs /var/run ${MEDIA_ROOT} ${STATIC_ROOT}" +echo "created support dirs /var/run '${MEDIA_ROOT}' '${STATIC_ROOT}' " +case "$1" in + "run") + envsubst < /conf/nginx.conf.tpl > /conf/nginx.conf && nginx -tc /conf/nginx.conf -if [ $# -eq 0 ]; then - envsubst < /conf/nginx.conf.tpl > /conf/nginx.conf && nginx -tc /conf/nginx.conf - envsubst < /conf/redis.conf.tpl > /conf/redis.conf + django-admin upgrade --no-input - django-admin upgrade --no-input + nginx -c /conf/nginx.conf + exec uwsgi --ini /conf/uwsgi.ini - nginx -c /conf/nginx.conf - redis-server /conf/redis.conf - exec uwsgi --ini /conf/uwsgi.ini -# exec gunicorn aurora.config.wsgi -c /conf/gunicorn_config.py -else - case "$1" in - "dev") + ;; + "dev") until pg_isready -h db -p 5432; do echo "waiting for database"; sleep 2; done; django-admin collectstatic --no-input django-admin migrate django-admin runserver 0.0.0.0:8000 - ;; - *) - exec "$@" ;; - esac -fi + "setup") + until pg_isready -h db -p 5432; + do echo "waiting for database"; sleep 2; done; + django-admin upgrade --no-input + ;; +*) +exec "$@" +;; +esac diff --git a/docker/conf/.dockerignore b/docker/conf/.dockerignore new file mode 100644 index 00000000..6f720dc4 --- /dev/null +++ b/docker/conf/.dockerignore @@ -0,0 +1,2 @@ +nginx.conf +redis.conf diff --git a/docker/conf/.gitignore b/docker/conf/.gitignore new file mode 100644 index 00000000..75db72a5 --- /dev/null +++ b/docker/conf/.gitignore @@ -0,0 +1 @@ +nginx.conf diff --git a/docker/conf/circus.conf b/docker/conf/circus.conf deleted file mode 100644 index 5c97e05c..00000000 --- a/docker/conf/circus.conf +++ /dev/null @@ -1,34 +0,0 @@ -[circus] -check_delay = 5 -# endpoint = tcp://127.0.0.1:5555 -# pubsub_endpoint = tcp://127.0.0.1:5556 -umask = 002 -working_dir = $(CIRCUS.ENV.PWD) -debug = false -stdout_stream.class = StdoutStream -stderr_stream.class = StdoutStream - -[watcher:web] -cmd = nginx -args = -c /etc/nginx.conf -user = www -use_sockets = True -copy_env = true -autostart = true - -[watcher:app] -cmd = uwsgi -args = --ini /etc/uwsgi.ini -user = www -use_sockets = True -copy_env = true -autostart = true - - -[watcher:daphne] -cmd = uwsgi -args = --ini /etc/uwsgi.ini -user = www -use_sockets = True -copy_env = true -autostart = true diff --git a/docker/conf/gunicorn_config.py b/docker/conf/gunicorn_config.py deleted file mode 100644 index 69d3cb4f..00000000 --- a/docker/conf/gunicorn_config.py +++ /dev/null @@ -1,62 +0,0 @@ -bind = "0.0.0.0:8000" -backlog = 512 - -workers = 4 -worker_class = "sync" -# worker_connections = 1000 # This setting only affects the Eventlet and Gevent worker types. -max_requests = 1000 -timeout = 30 -keepalive = 2 - -proc_name = None -daemon = False -pidfile = None -umask = 0 -user = None -group = None -tmp_upload_dir = None - - -errorlog = "-" -loglevel = "error" -accesslog = "-" -access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' - - -def post_fork(server, worker): - server.log.info("Worker spawned (pid: %s)", worker.pid) - - -def pre_fork(server, worker): - pass - - -def pre_exec(server): - server.log.info("Forked child, re-executing.") - - -def when_ready(server): - server.log.info("Server is ready. 
Spawning workers") - - -def worker_int(worker): - worker.log.info("Worker received INT or QUIT signal") - - # get traceback info - import sys - import threading - import traceback - - id2name = {th.ident: th.name for th in threading.enumerate()} - code = [] - for threadId, stack in sys._current_frames().items(): - code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId)) - for filename, lineno, name, line in traceback.extract_stack(stack): - code.append('File: "%s", line %d, in %s' % (filename, lineno, name)) - if line: - code.append(" %s" % (line.strip())) - worker.log.debug("\n".join(code)) - - -def worker_abort(worker): - worker.log.info("worker received SIGABRT signal") diff --git a/docker/conf/nginx.conf.tpl b/docker/conf/nginx.conf.tpl index 6326913e..69dd8eb6 100644 --- a/docker/conf/nginx.conf.tpl +++ b/docker/conf/nginx.conf.tpl @@ -62,6 +62,7 @@ http { client_max_body_size ${NGINX_MAX_BODY_SIZE}; large_client_header_buffers 4 16k; access_log /dev/stdout; + listen 8000; listen 80; proxy_no_cache 1; proxy_cache_bypass 1; @@ -72,6 +73,8 @@ http { add_header X-Aurora-Version "${AURORA_VERSION}" always; add_header X-Aurora-Build "${AURORA_BUILD}" always; add_header X-Aurora-Time "${DOLLAR}date_gmt" always; + add_header X-Aurora-static-root "${STATIC_ROOT}" always; + add_header X-Aurora-static-url "${STATIC_URL}" always; location /404.html { root /var/nginx/; @@ -147,6 +150,7 @@ http { add_header X-Aurora-Version "${AURORA_VERSION}"; add_header X-Aurora-Build "${AURORA_BUILD}"; add_header X-Aurora-Time "${DOLLAR}date_gmt"; + add_header X-Aurora-static "${STATIC_URL}"; expires max; gzip_static on; @@ -169,7 +173,7 @@ http { add_header X-Aurora-Build "${AURORA_BUILD}"; add_header X-Aurora-Time "${DOLLAR}date_gmt"; - proxy_pass http://127.0.0.1:8000; + proxy_pass http://127.0.0.1:8888; proxy_set_header Host ${DOLLAR}host; proxy_set_header X-Forwarded-For ${DOLLAR}proxy_add_x_forwarded_for; proxy_set_header X-Scheme ${DOLLAR}scheme; diff --git a/docker/conf/redis.conf.tpl b/docker/conf/redis.conf.tpl deleted file mode 100644 index cba15a8f..00000000 --- a/docker/conf/redis.conf.tpl +++ /dev/null @@ -1,1406 +0,0 @@ -# Redis configuration file example. -# -# Note that in order to read the configuration file, Redis must be -# started with the file path as first argument: -# -# ./redis-server /path/to/redis.conf - -# Note on units: when memory size is needed, it is possible to specify -# it in the usual form of 1k 5GB 4M and so forth: -# -# 1k => 1000 bytes -# 1kb => 1024 bytes -# 1m => 1000000 bytes -# 1mb => 1024*1024 bytes -# 1g => 1000000000 bytes -# 1gb => 1024*1024*1024 bytes -# -# units are case insensitive so 1GB 1Gb 1gB are all the same. - -################################## INCLUDES ################################### - -# Include one or more other config files here. This is useful if you -# have a standard template that goes to all Redis servers but also need -# to customize a few per-server settings. Include files can include -# other files, so use this wisely. -# -# Notice option "include" won't be rewritten by command "CONFIG REWRITE" -# from admin or Redis Sentinel. Since Redis always uses the last processed -# line as value of a configuration directive, you'd better put includes -# at the beginning of this file to avoid overwriting config change at runtime. -# -# If instead you are interested in using includes to override configuration -# options, it is better to use include as the last line. 
-# -# include /path/to/local.conf -# include /path/to/other.conf - -################################## MODULES ##################################### - -# Load modules at startup. If the server is not able to load modules -# it will abort. It is possible to use multiple loadmodule directives. -# -# loadmodule /path/to/my_module.so -# loadmodule /path/to/other_module.so - -################################## NETWORK ##################################### - -# By default, if no "bind" configuration directive is specified, Redis listens -# for connections from all the network interfaces available on the server. -# It is possible to listen to just one or multiple selected interfaces using -# the "bind" configuration directive, followed by one or more IP addresses. -# -# Examples: -# -# bind 192.168.1.100 10.0.0.1 -# bind 127.0.0.1 ::1 -# -# ~~~ WARNING ~~~ If the computer running Redis is directly exposed to the -# internet, binding to all the interfaces is dangerous and will expose the -# instance to everybody on the internet. So by default we uncomment the -# following bind directive, that will force Redis to listen only into -# the IPv4 loopback interface address (this means Redis will be able to -# accept connections only from clients running into the same computer it -# is running). -# -# IF YOU ARE SURE YOU WANT YOUR INSTANCE TO LISTEN TO ALL THE INTERFACES -# JUST COMMENT THE FOLLOWING LINE. -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -bind 127.0.0.1 ::1 - -# Protected mode is a layer of security protection, in order to avoid that -# Redis instances left open on the internet are accessed and exploited. -# -# When protected mode is on and if: -# -# 1) The server is not binding explicitly to a set of addresses using the -# "bind" directive. -# 2) No password is configured. -# -# The server only accepts connections from clients connecting from the -# IPv4 and IPv6 loopback addresses 127.0.0.1 and ::1, and from Unix domain -# sockets. -# -# By default protected mode is enabled. You should disable it only if -# you are sure you want clients from other hosts to connect to Redis -# even if no authentication is configured, nor a specific set of interfaces -# are explicitly listed using the "bind" directive. -protected-mode yes - -# Accept connections on the specified port, default is 6379 (IANA #815344). -# If port 0 is specified Redis will not listen on a TCP socket. -port 6379 - -# TCP listen() backlog. -# -# In high requests-per-second environments you need an high backlog in order -# to avoid slow clients connections issues. Note that the Linux kernel -# will silently truncate it to the value of /proc/sys/net/core/somaxconn so -# make sure to raise both the value of somaxconn and tcp_max_syn_backlog -# in order to get the desired effect. -tcp-backlog 511 - -# Unix socket. -# -# Specify the path for the Unix socket that will be used to listen for -# incoming connections. There is no default, so Redis will not listen -# on a unix socket when not specified. -# -# unixsocket /var/run/redis/redis-server.sock -# unixsocketperm 700 - -# Close the connection after a client is idle for N seconds (0 to disable) -timeout 0 - -# TCP keepalive. -# -# If non-zero, use SO_KEEPALIVE to send TCP ACKs to clients in absence -# of communication. This is useful for two reasons: -# -# 1) Detect dead peers. -# 2) Take the connection alive from the point of view of network -# equipment in the middle. -# -# On Linux, the specified value (in seconds) is the period used to send ACKs. 
-# Note that to close the connection the double of the time is needed. -# On other kernels the period depends on the kernel configuration. -# -# A reasonable value for this option is 300 seconds, which is the new -# Redis default starting with Redis 3.2.1. -tcp-keepalive 300 - -################################# GENERAL ##################################### - -# By default Redis does not run as a daemon. Use 'yes' if you need it. -# Note that Redis will write a pid file in /var/run/redis.pid when daemonized. -daemonize yes - -# If you run Redis from upstart or systemd, Redis can interact with your -# supervision tree. Options: -# supervised no - no supervision interaction -# supervised upstart - signal upstart by putting Redis into SIGSTOP mode -# supervised systemd - signal systemd by writing READY=1 to $NOTIFY_SOCKET -# supervised auto - detect upstart or systemd method based on -# UPSTART_JOB or NOTIFY_SOCKET environment variables -# Note: these supervision methods only signal "process is ready." -# They do not enable continuous liveness pings back to your supervisor. -supervised no - -# If a pid file is specified, Redis writes it where specified at startup -# and removes it at exit. -# -# When the server runs non daemonized, no pid file is created if none is -# specified in the configuration. When the server is daemonized, the pid file -# is used even if not specified, defaulting to "/var/run/redis.pid". -# -# Creating a pid file is best effort: if Redis is not able to create it -# nothing bad happens, the server will start and run normally. -pidfile /var/run/redis/redis-server.pid - -# Specify the server verbosity level. -# This can be one of: -# debug (a lot of information, useful for development/testing) -# verbose (many rarely useful info, but not a mess like the debug level) -# notice (moderately verbose, what you want in production probably) -# warning (only very important / critical messages are logged) -loglevel ${REDIS_LOGLEVEL} - -# Specify the log file name. Also the empty string can be used to force -# Redis to log on the standard output. Note that if you use standard -# output for logging but daemonize, logs will be sent to /dev/null -logfile /var/log/redis/redis-server.log - -# To enable logging to the system logger, just set 'syslog-enabled' to yes, -# and optionally update the other syslog parameters to suit your needs. -# syslog-enabled no - -# Specify the syslog identity. -# syslog-ident redis - -# Specify the syslog facility. Must be USER or between LOCAL0-LOCAL7. -# syslog-facility local0 - -# Set the number of databases. The default database is DB 0, you can select -# a different one on a per-connection basis using SELECT where -# dbid is a number between 0 and 'databases'-1 -databases 16 - -# By default Redis shows an ASCII art logo only when started to log to the -# standard output and if the standard output is a TTY. Basically this means -# that normally a logo is displayed only in interactive sessions. -# -# However it is possible to force the pre-4.0 behavior and always show a -# ASCII art logo in startup logs by setting the following option to yes. -always-show-logo yes - -################################ SNAPSHOTTING ################################ -# -# Save the DB on disk: -# -# save -# -# Will save the DB if both the given number of seconds and the given -# number of write operations against the DB occurred. 
-# -# In the example below the behaviour will be to save: -# after 900 sec (15 min) if at least 1 key changed -# after 300 sec (5 min) if at least 10 keys changed -# after 60 sec if at least 10000 keys changed -# -# Note: you can disable saving completely by commenting out all "save" lines. -# -# It is also possible to remove all the previously configured save -# points by adding a save directive with a single empty string argument -# like in the following example: -# -# save "" - -save "" - -# save 900 1 -# save 300 10 -# save 60 10000 - -# By default Redis will stop accepting writes if RDB snapshots are enabled -# (at least one save point) and the latest background save failed. -# This will make the user aware (in a hard way) that data is not persisting -# on disk properly, otherwise chances are that no one will notice and some -# disaster will happen. -# -# If the background saving process will start working again Redis will -# automatically allow writes again. -# -# However if you have setup your proper monitoring of the Redis server -# and persistence, you may want to disable this feature so that Redis will -# continue to work as usual even if there are problems with disk, -# permissions, and so forth. -stop-writes-on-bgsave-error yes - -# Compress string objects using LZF when dump .rdb databases? -# For default that's set to 'yes' as it's almost always a win. -# If you want to save some CPU in the saving child set it to 'no' but -# the dataset will likely be bigger if you have compressible values or keys. -rdbcompression yes - -# Since version 5 of RDB a CRC64 checksum is placed at the end of the file. -# This makes the format more resistant to corruption but there is a performance -# hit to pay (around 10%) when saving and loading RDB files, so you can disable it -# for maximum performances. -# -# RDB files created with checksum disabled have a checksum of zero that will -# tell the loading code to skip the check. -rdbchecksum yes - -# The filename where to dump the DB -dbfilename dump.rdb - -# The working directory. -# -# The DB will be written inside this directory, with the filename specified -# above using the 'dbfilename' configuration directive. -# -# The Append Only File will also be created inside this directory. -# -# Note that you must specify a directory here, not a file name. -dir /var/lib/redis - -################################# REPLICATION ################################# - -# Master-Replica replication. Use replicaof to make a Redis instance a copy of -# another Redis server. A few things to understand ASAP about Redis replication. -# -# +------------------+ +---------------+ -# | Master | ---> | Replica | -# | (receive writes) | | (exact copy) | -# +------------------+ +---------------+ -# -# 1) Redis replication is asynchronous, but you can configure a master to -# stop accepting writes if it appears to be not connected with at least -# a given number of replicas. -# 2) Redis replicas are able to perform a partial resynchronization with the -# master if the replication link is lost for a relatively small amount of -# time. You may want to configure the replication backlog size (see the next -# sections of this file) with a sensible value depending on your needs. -# 3) Replication is automatic and does not need user intervention. After a -# network partition replicas automatically try to reconnect to masters -# and resynchronize with them. 
-# -# replicaof - -# If the master is password protected (using the "requirepass" configuration -# directive below) it is possible to tell the replica to authenticate before -# starting the replication synchronization process, otherwise the master will -# refuse the replica request. -# -# masterauth - -# When a replica loses its connection with the master, or when the replication -# is still in progress, the replica can act in two different ways: -# -# 1) if replica-serve-stale-data is set to 'yes' (the default) the replica will -# still reply to client requests, possibly with out of date data, or the -# data set may just be empty if this is the first synchronization. -# -# 2) if replica-serve-stale-data is set to 'no' the replica will reply with -# an error "SYNC with master in progress" to all the kind of commands -# but to INFO, replicaOF, AUTH, PING, SHUTDOWN, REPLCONF, ROLE, CONFIG, -# SUBSCRIBE, UNSUBSCRIBE, PSUBSCRIBE, PUNSUBSCRIBE, PUBLISH, PUBSUB, -# COMMAND, POST, HOST: and LATENCY. -# -replica-serve-stale-data yes - -# You can configure a replica instance to accept writes or not. Writing against -# a replica instance may be useful to store some ephemeral data (because data -# written on a replica will be easily deleted after resync with the master) but -# may also cause problems if clients are writing to it because of a -# misconfiguration. -# -# Since Redis 2.6 by default replicas are read-only. -# -# Note: read only replicas are not designed to be exposed to untrusted clients -# on the internet. It's just a protection layer against misuse of the instance. -# Still a read only replica exports by default all the administrative commands -# such as CONFIG, DEBUG, and so forth. To a limited extent you can improve -# security of read only replicas using 'rename-command' to shadow all the -# administrative / dangerous commands. -replica-read-only yes - -# Replication SYNC strategy: disk or socket. -# -# ------------------------------------------------------- -# WARNING: DISKLESS REPLICATION IS EXPERIMENTAL CURRENTLY -# ------------------------------------------------------- -# -# New replicas and reconnecting replicas that are not able to continue the replication -# process just receiving differences, need to do what is called a "full -# synchronization". An RDB file is transmitted from the master to the replicas. -# The transmission can happen in two different ways: -# -# 1) Disk-backed: The Redis master creates a new process that writes the RDB -# file on disk. Later the file is transferred by the parent -# process to the replicas incrementally. -# 2) Diskless: The Redis master creates a new process that directly writes the -# RDB file to replica sockets, without touching the disk at all. -# -# With disk-backed replication, while the RDB file is generated, more replicas -# can be queued and served with the RDB file as soon as the current child producing -# the RDB file finishes its work. With diskless replication instead once -# the transfer starts, new replicas arriving will be queued and a new transfer -# will start when the current one terminates. -# -# When diskless replication is used, the master waits a configurable amount of -# time (in seconds) before starting the transfer in the hope that multiple replicas -# will arrive and the transfer can be parallelized. -# -# With slow disks and fast (large bandwidth) networks, diskless replication -# works better. 
-repl-diskless-sync no - -# When diskless replication is enabled, it is possible to configure the delay -# the server waits in order to spawn the child that transfers the RDB via socket -# to the replicas. -# -# This is important since once the transfer starts, it is not possible to serve -# new replicas arriving, that will be queued for the next RDB transfer, so the server -# waits a delay in order to let more replicas arrive. -# -# The delay is specified in seconds, and by default is 5 seconds. To disable -# it entirely just set it to 0 seconds and the transfer will start ASAP. -repl-diskless-sync-delay 5 - -# Replicas send PINGs to server in a predefined interval. It's possible to change -# this interval with the repl_ping_replica_period option. The default value is 10 -# seconds. -# -# repl-ping-replica-period 10 - -# The following option sets the replication timeout for: -# -# 1) Bulk transfer I/O during SYNC, from the point of view of replica. -# 2) Master timeout from the point of view of replicas (data, pings). -# 3) Replica timeout from the point of view of masters (REPLCONF ACK pings). -# -# It is important to make sure that this value is greater than the value -# specified for repl-ping-replica-period otherwise a timeout will be detected -# every time there is low traffic between the master and the replica. -# -# repl-timeout 60 - -# Disable TCP_NODELAY on the replica socket after SYNC? -# -# If you select "yes" Redis will use a smaller number of TCP packets and -# less bandwidth to send data to replicas. But this can add a delay for -# the data to appear on the replica side, up to 40 milliseconds with -# Linux kernels using a default configuration. -# -# If you select "no" the delay for data to appear on the replica side will -# be reduced but more bandwidth will be used for replication. -# -# By default we optimize for low latency, but in very high traffic conditions -# or when the master and replicas are many hops away, turning this to "yes" may -# be a good idea. -repl-disable-tcp-nodelay no - -# Set the replication backlog size. The backlog is a buffer that accumulates -# replica data when replicas are disconnected for some time, so that when a replica -# wants to reconnect again, often a full resync is not needed, but a partial -# resync is enough, just passing the portion of data the replica missed while -# disconnected. -# -# The bigger the replication backlog, the longer the time the replica can be -# disconnected and later be able to perform a partial resynchronization. -# -# The backlog is only allocated once there is at least a replica connected. -# -# repl-backlog-size 1mb - -# After a master has no longer connected replicas for some time, the backlog -# will be freed. The following option configures the amount of seconds that -# need to elapse, starting from the time the last replica disconnected, for -# the backlog buffer to be freed. -# -# Note that replicas never free the backlog for timeout, since they may be -# promoted to masters later, and should be able to correctly "partially -# resynchronize" with the replicas: hence they should always accumulate backlog. -# -# A value of 0 means to never release the backlog. -# -# repl-backlog-ttl 3600 - -# The replica priority is an integer number published by Redis in the INFO output. -# It is used by Redis Sentinel in order to select a replica to promote into a -# master if the master is no longer working correctly. 
-# -# A replica with a low priority number is considered better for promotion, so -# for instance if there are three replicas with priority 10, 100, 25 Sentinel will -# pick the one with priority 10, that is the lowest. -# -# However a special priority of 0 marks the replica as not able to perform the -# role of master, so a replica with priority of 0 will never be selected by -# Redis Sentinel for promotion. -# -# By default the priority is 100. -replica-priority 100 - -# It is possible for a master to stop accepting writes if there are less than -# N replicas connected, having a lag less or equal than M seconds. -# -# The N replicas need to be in "online" state. -# -# The lag in seconds, that must be <= the specified value, is calculated from -# the last ping received from the replica, that is usually sent every second. -# -# This option does not GUARANTEE that N replicas will accept the write, but -# will limit the window of exposure for lost writes in case not enough replicas -# are available, to the specified number of seconds. -# -# For example to require at least 3 replicas with a lag <= 10 seconds use: -# -# min-replicas-to-write 3 -# min-replicas-max-lag 10 -# -# Setting one or the other to 0 disables the feature. -# -# By default min-replicas-to-write is set to 0 (feature disabled) and -# min-replicas-max-lag is set to 10. - -# A Redis master is able to list the address and port of the attached -# replicas in different ways. For example the "INFO replication" section -# offers this information, which is used, among other tools, by -# Redis Sentinel in order to discover replica instances. -# Another place where this info is available is in the output of the -# "ROLE" command of a master. -# -# The listed IP and address normally reported by a replica is obtained -# in the following way: -# -# IP: The address is auto detected by checking the peer address -# of the socket used by the replica to connect with the master. -# -# Port: The port is communicated by the replica during the replication -# handshake, and is normally the port that the replica is using to -# listen for connections. -# -# However when port forwarding or Network Address Translation (NAT) is -# used, the replica may be actually reachable via different IP and port -# pairs. The following two options can be used by a replica in order to -# report to its master a specific set of IP and port, so that both INFO -# and ROLE will report those values. -# -# There is no need to use both the options if you need to override just -# the port or the IP address. -# -# replica-announce-ip 5.5.5.5 -# replica-announce-port 1234 - -################################## SECURITY ################################### - -# Require clients to issue AUTH before processing any other -# commands. This might be useful in environments in which you do not trust -# others with access to the host running redis-server. -# -# This should stay commented out for backward compatibility and because most -# people do not need auth (e.g. they run their own servers). -# -# Warning: since Redis is pretty fast an outside user can try up to -# 150k passwords per second against a good box. This means that you should -# use a very strong password otherwise it will be very easy to break. -# -# requirepass foobared - -# Command renaming. -# -# It is possible to change the name of dangerous commands in a shared -# environment. 
For instance the CONFIG command may be renamed into something -# hard to guess so that it will still be available for internal-use tools -# but not available for general clients. -# -# Example: -# -# rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52 -# -# It is also possible to completely kill a command by renaming it into -# an empty string: -# -# rename-command CONFIG "" -# -# Please note that changing the name of commands that are logged into the -# AOF file or transmitted to replicas may cause problems. - -################################### CLIENTS #################################### - -# Set the max number of connected clients at the same time. By default -# this limit is set to 10000 clients, however if the Redis server is not -# able to configure the process file limit to allow for the specified limit -# the max number of allowed clients is set to the current file limit -# minus 32 (as Redis reserves a few file descriptors for internal uses). -# -# Once the limit is reached Redis will close all the new connections sending -# an error 'max number of clients reached'. -# -# maxclients 10000 - -############################## MEMORY MANAGEMENT ################################ - -# Set a memory usage limit to the specified amount of bytes. -# When the memory limit is reached Redis will try to remove keys -# according to the eviction policy selected (see maxmemory-policy). -# -# If Redis can't remove keys according to the policy, or if the policy is -# set to 'noeviction', Redis will start to reply with errors to commands -# that would use more memory, like SET, LPUSH, and so on, and will continue -# to reply to read-only commands like GET. -# -# This option is usually useful when using Redis as an LRU or LFU cache, or to -# set a hard memory limit for an instance (using the 'noeviction' policy). -# -# WARNING: If you have replicas attached to an instance with maxmemory on, -# the size of the output buffers needed to feed the replicas are subtracted -# from the used memory count, so that network problems / resyncs will -# not trigger a loop where keys are evicted, and in turn the output -# buffer of replicas is full with DELs of keys evicted triggering the deletion -# of more keys, and so forth until the database is completely emptied. -# -# In short... if you have replicas attached it is suggested that you set a lower -# limit for maxmemory so that there is some free RAM on the system for replica -# output buffers (but this is not needed if the policy is 'noeviction'). -# -maxmemory ${REDIS_MAXMEMORY} - -# MAXMEMORY POLICY: how Redis will select what to remove when maxmemory -# is reached. You can select among five behaviors: -# -# volatile-lru -> Evict using approximated LRU among the keys with an expire set. -# allkeys-lru -> Evict any key using approximated LRU. -# volatile-lfu -> Evict using approximated LFU among the keys with an expire set. -# allkeys-lfu -> Evict any key using approximated LFU. -# volatile-random -> Remove a random key among the ones with an expire set. -# allkeys-random -> Remove a random key, any key. -# volatile-ttl -> Remove the key with the nearest expire time (minor TTL) -# noeviction -> Don't evict anything, just return an error on write operations. -# -# LRU means Least Recently Used -# LFU means Least Frequently Used -# -# Both LRU, LFU and volatile-ttl are implemented using approximated -# randomized algorithms. 
-# -# Note: with any of the above policies, Redis will return an error on write -# operations, when there are no suitable keys for eviction. -# -# At the date of writing these commands are: set setnx setex append -# incr decr rpush lpush rpushx lpushx linsert lset rpoplpush sadd -# sinter sinterstore sunion sunionstore sdiff sdiffstore zadd zincrby -# zunionstore zinterstore hset hsetnx hmset hincrby incrby decrby -# getset mset msetnx exec sort -# -# The default is: -# - -maxmemory-policy ${REDIS_MAXMEMORY_POLICY} - -# LRU, LFU and minimal TTL algorithms are not precise algorithms but approximated -# algorithms (in order to save memory), so you can tune it for speed or -# accuracy. For default Redis will check five keys and pick the one that was -# used less recently, you can change the sample size using the following -# configuration directive. -# -# The default of 5 produces good enough results. 10 Approximates very closely -# true LRU but costs more CPU. 3 is faster but not very accurate. -# -# maxmemory-samples 5 - -# Starting from Redis 5, by default a replica will ignore its maxmemory setting -# (unless it is promoted to master after a failover or manually). It means -# that the eviction of keys will be just handled by the master, sending the -# DEL commands to the replica as keys evict in the master side. -# -# This behavior ensures that masters and replicas stay consistent, and is usually -# what you want, however if your replica is writable, or you want the replica to have -# a different memory setting, and you are sure all the writes performed to the -# replica are idempotent, then you may change this default (but be sure to understand -# what you are doing). -# -# Note that since the replica by default does not evict, it may end using more -# memory than the one set via maxmemory (there are certain buffers that may -# be larger on the replica, or data structures may sometimes take more memory and so -# forth). So make sure you monitor your replicas and make sure they have enough -# memory to never hit a real out-of-memory condition before the master hits -# the configured maxmemory setting. -# -# replica-ignore-maxmemory yes - -############################# LAZY FREEING #################################### - -# Redis has two primitives to delete keys. One is called DEL and is a blocking -# deletion of the object. It means that the server stops processing new commands -# in order to reclaim all the memory associated with an object in a synchronous -# way. If the key deleted is associated with a small object, the time needed -# in order to execute the DEL command is very small and comparable to most other -# O(1) or O(log_N) commands in Redis. However if the key is associated with an -# aggregated value containing millions of elements, the server can block for -# a long time (even seconds) in order to complete the operation. -# -# For the above reasons Redis also offers non blocking deletion primitives -# such as UNLINK (non blocking DEL) and the ASYNC option of FLUSHALL and -# FLUSHDB commands, in order to reclaim memory in background. Those commands -# are executed in constant time. Another thread will incrementally free the -# object in the background as fast as possible. -# -# DEL, UNLINK and ASYNC option of FLUSHALL and FLUSHDB are user-controlled. -# It's up to the design of the application to understand when it is a good -# idea to use one or the other. 
However the Redis server sometimes has to -# delete keys or flush the whole database as a side effect of other operations. -# Specifically Redis deletes objects independently of a user call in the -# following scenarios: -# -# 1) On eviction, because of the maxmemory and maxmemory policy configurations, -# in order to make room for new data, without going over the specified -# memory limit. -# 2) Because of expire: when a key with an associated time to live (see the -# EXPIRE command) must be deleted from memory. -# 3) Because of a side effect of a command that stores data on a key that may -# already exist. For example the RENAME command may delete the old key -# content when it is replaced with another one. Similarly SUNIONSTORE -# or SORT with STORE option may delete existing keys. The SET command -# itself removes any old content of the specified key in order to replace -# it with the specified string. -# 4) During replication, when a replica performs a full resynchronization with -# its master, the content of the whole database is removed in order to -# load the RDB file just transferred. -# -# In all the above cases the default is to delete objects in a blocking way, -# like if DEL was called. However you can configure each case specifically -# in order to instead release memory in a non-blocking way like if UNLINK -# was called, using the following configuration directives: - -lazyfree-lazy-eviction no -lazyfree-lazy-expire no -lazyfree-lazy-server-del no -replica-lazy-flush no - -############################## APPEND ONLY MODE ############################### - -# By default Redis asynchronously dumps the dataset on disk. This mode is -# good enough in many applications, but an issue with the Redis process or -# a power outage may result into a few minutes of writes lost (depending on -# the configured save points). -# -# The Append Only File is an alternative persistence mode that provides -# much better durability. For instance using the default data fsync policy -# (see later in the config file) Redis can lose just one second of writes in a -# dramatic event like a server power outage, or a single write if something -# wrong with the Redis process itself happens, but the operating system is -# still running correctly. -# -# AOF and RDB persistence can be enabled at the same time without problems. -# If the AOF is enabled on startup Redis will load the AOF, that is the file -# with the better durability guarantees. -# -# Please check http://redis.io/topics/persistence for more information. - -appendonly no - -# The name of the append only file (default: "appendonly.aof") - -appendfilename "appendonly.aof" - -# The fsync() call tells the Operating System to actually write data on disk -# instead of waiting for more data in the output buffer. Some OS will really flush -# data on disk, some other OS will just try to do it ASAP. -# -# Redis supports three different modes: -# -# no: don't fsync, just let the OS flush the data when it wants. Faster. -# always: fsync after every write to the append only log. Slow, Safest. -# everysec: fsync only one time every second. Compromise. -# -# The default is "everysec", as that's usually the right compromise between -# speed and data safety. 
It's up to you to understand if you can relax this to -# "no" that will let the operating system flush the output buffer when -# it wants, for better performances (but if you can live with the idea of -# some data loss consider the default persistence mode that's snapshotting), -# or on the contrary, use "always" that's very slow but a bit safer than -# everysec. -# -# More details please check the following article: -# http://antirez.com/post/redis-persistence-demystified.html -# -# If unsure, use "everysec". - -# appendfsync always -appendfsync everysec -# appendfsync no - -# When the AOF fsync policy is set to always or everysec, and a background -# saving process (a background save or AOF log background rewriting) is -# performing a lot of I/O against the disk, in some Linux configurations -# Redis may block too long on the fsync() call. Note that there is no fix for -# this currently, as even performing fsync in a different thread will block -# our synchronous write(2) call. -# -# In order to mitigate this problem it's possible to use the following option -# that will prevent fsync() from being called in the main process while a -# BGSAVE or BGREWRITEAOF is in progress. -# -# This means that while another child is saving, the durability of Redis is -# the same as "appendfsync none". In practical terms, this means that it is -# possible to lose up to 30 seconds of log in the worst scenario (with the -# default Linux settings). -# -# If you have latency problems turn this to "yes". Otherwise leave it as -# "no" that is the safest pick from the point of view of durability. - -no-appendfsync-on-rewrite no - -# Automatic rewrite of the append only file. -# Redis is able to automatically rewrite the log file implicitly calling -# BGREWRITEAOF when the AOF log size grows by the specified percentage. -# -# This is how it works: Redis remembers the size of the AOF file after the -# latest rewrite (if no rewrite has happened since the restart, the size of -# the AOF at startup is used). -# -# This base size is compared to the current size. If the current size is -# bigger than the specified percentage, the rewrite is triggered. Also -# you need to specify a minimal size for the AOF file to be rewritten, this -# is useful to avoid rewriting the AOF file even if the percentage increase -# is reached but it is still pretty small. -# -# Specify a percentage of zero in order to disable the automatic AOF -# rewrite feature. - -auto-aof-rewrite-percentage 100 -auto-aof-rewrite-min-size 64mb - -# An AOF file may be found to be truncated at the end during the Redis -# startup process, when the AOF data gets loaded back into memory. -# This may happen when the system where Redis is running -# crashes, especially when an ext4 filesystem is mounted without the -# data=ordered option (however this can't happen when Redis itself -# crashes or aborts but the operating system still works correctly). -# -# Redis can either exit with an error when this happens, or load as much -# data as possible (the default now) and start if the AOF file is found -# to be truncated at the end. The following option controls this behavior. -# -# If aof-load-truncated is set to yes, a truncated AOF file is loaded and -# the Redis server starts emitting a log to inform the user of the event. -# Otherwise if the option is set to no, the server aborts with an error -# and refuses to start. When the option is set to no, the user requires -# to fix the AOF file using the "redis-check-aof" utility before to restart -# the server. 
-# -# Note that if the AOF file will be found to be corrupted in the middle -# the server will still exit with an error. This option only applies when -# Redis will try to read more data from the AOF file but not enough bytes -# will be found. -aof-load-truncated yes - -# When rewriting the AOF file, Redis is able to use an RDB preamble in the -# AOF file for faster rewrites and recoveries. When this option is turned -# on the rewritten AOF file is composed of two different stanzas: -# -# [RDB file][AOF tail] -# -# When loading Redis recognizes that the AOF file starts with the "REDIS" -# string and loads the prefixed RDB file, and continues loading the AOF -# tail. -aof-use-rdb-preamble yes - -################################ LUA SCRIPTING ############################### - -# Max execution time of a Lua script in milliseconds. -# -# If the maximum execution time is reached Redis will log that a script is -# still in execution after the maximum allowed time and will start to -# reply to queries with an error. -# -# When a long running script exceeds the maximum execution time only the -# SCRIPT KILL and SHUTDOWN NOSAVE commands are available. The first can be -# used to stop a script that did not yet called write commands. The second -# is the only way to shut down the server in the case a write command was -# already issued by the script but the user doesn't want to wait for the natural -# termination of the script. -# -# Set it to 0 or a negative value for unlimited execution without warnings. -lua-time-limit 5000 - -################################ REDIS CLUSTER ############################### - -# Normal Redis instances can't be part of a Redis Cluster; only nodes that are -# started as cluster nodes can. In order to start a Redis instance as a -# cluster node enable the cluster support uncommenting the following: -# -# cluster-enabled yes - -# Every cluster node has a cluster configuration file. This file is not -# intended to be edited by hand. It is created and updated by Redis nodes. -# Every Redis Cluster node requires a different cluster configuration file. -# Make sure that instances running in the same system do not have -# overlapping cluster configuration file names. -# -# cluster-config-file nodes-6379.conf - -# Cluster node timeout is the amount of milliseconds a node must be unreachable -# for it to be considered in failure state. -# Most other internal time limits are multiple of the node timeout. -# -# cluster-node-timeout 15000 - -# A replica of a failing master will avoid to start a failover if its data -# looks too old. -# -# There is no simple way for a replica to actually have an exact measure of -# its "data age", so the following two checks are performed: -# -# 1) If there are multiple replicas able to failover, they exchange messages -# in order to try to give an advantage to the replica with the best -# replication offset (more data from the master processed). -# Replicas will try to get their rank by offset, and apply to the start -# of the failover a delay proportional to their rank. -# -# 2) Every single replica computes the time of the last interaction with -# its master. This can be the last ping or command received (if the master -# is still in the "connected" state), or the time that elapsed since the -# disconnection with the master (if the replication link is currently down). -# If the last interaction is too old, the replica will not try to failover -# at all. -# -# The point "2" can be tuned by user. 
Specifically a replica will not perform -# the failover if, since the last interaction with the master, the time -# elapsed is greater than: -# -# (node-timeout * replica-validity-factor) + repl-ping-replica-period -# -# So for example if node-timeout is 30 seconds, and the replica-validity-factor -# is 10, and assuming a default repl-ping-replica-period of 10 seconds, the -# replica will not try to failover if it was not able to talk with the master -# for longer than 310 seconds. -# -# A large replica-validity-factor may allow replicas with too old data to failover -# a master, while a too small value may prevent the cluster from being able to -# elect a replica at all. -# -# For maximum availability, it is possible to set the replica-validity-factor -# to a value of 0, which means, that replicas will always try to failover the -# master regardless of the last time they interacted with the master. -# (However they'll always try to apply a delay proportional to their -# offset rank). -# -# Zero is the only value able to guarantee that when all the partitions heal -# the cluster will always be able to continue. -# -# cluster-replica-validity-factor 10 - -# Cluster replicas are able to migrate to orphaned masters, that are masters -# that are left without working replicas. This improves the cluster ability -# to resist to failures as otherwise an orphaned master can't be failed over -# in case of failure if it has no working replicas. -# -# Replicas migrate to orphaned masters only if there are still at least a -# given number of other working replicas for their old master. This number -# is the "migration barrier". A migration barrier of 1 means that a replica -# will migrate only if there is at least 1 other working replica for its master -# and so forth. It usually reflects the number of replicas you want for every -# master in your cluster. -# -# Default is 1 (replicas migrate only if their masters remain with at least -# one replica). To disable migration just set it to a very large value. -# A value of 0 can be set but is useful only for debugging and dangerous -# in production. -# -# cluster-migration-barrier 1 - -# By default Redis Cluster nodes stop accepting queries if they detect there -# is at least an hash slot uncovered (no available node is serving it). -# This way if the cluster is partially down (for example a range of hash slots -# are no longer covered) all the cluster becomes, eventually, unavailable. -# It automatically returns available as soon as all the slots are covered again. -# -# However sometimes you want the subset of the cluster which is working, -# to continue to accept queries for the part of the key space that is still -# covered. In order to do so, just set the cluster-require-full-coverage -# option to no. -# -# cluster-require-full-coverage yes - -# This option, when set to yes, prevents replicas from trying to failover its -# master during master failures. However the master can still perform a -# manual failover, if forced to do so. -# -# This is useful in different scenarios, especially in the case of multiple -# data center operations, where we want one side to never be promoted if not -# in the case of a total DC failure. -# -# cluster-replica-no-failover no - -# In order to setup your cluster make sure to read the documentation -# available at http://redis.io web site. 
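The failover-validity rule quoted above boils down to a single expression. As a quick sanity check of the arithmetic (an illustrative Python sketch, not part of this diff; the function name is ours), using the values from the example in the comment:

```python
# Longest time (in seconds) a replica may have been out of touch with its master
# and still attempt a failover, per the formula in the removed config comment:
#   (node-timeout * cluster-replica-validity-factor) + repl-ping-replica-period
def max_failover_silence(node_timeout: float,
                         replica_validity_factor: int,
                         repl_ping_replica_period: float = 10.0) -> float:
    return node_timeout * replica_validity_factor + repl_ping_replica_period


# Values from the example above: 30 s node timeout, factor 10, 10 s ping period.
print(max_failover_silence(30, 10, 10))  # -> 310.0 seconds
```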
- -########################## CLUSTER DOCKER/NAT support ######################## - -# In certain deployments, Redis Cluster nodes address discovery fails, because -# addresses are NAT-ted or because ports are forwarded (the typical case is -# Docker and other containers). -# -# In order to make Redis Cluster working in such environments, a static -# configuration where each node knows its public address is needed. The -# following two options are used for this scope, and are: -# -# * cluster-announce-ip -# * cluster-announce-port -# * cluster-announce-bus-port -# -# Each instruct the node about its address, client port, and cluster message -# bus port. The information is then published in the header of the bus packets -# so that other nodes will be able to correctly map the address of the node -# publishing the information. -# -# If the above options are not used, the normal Redis Cluster auto-detection -# will be used instead. -# -# Note that when remapped, the bus port may not be at the fixed offset of -# clients port + 10000, so you can specify any port and bus-port depending -# on how they get remapped. If the bus-port is not set, a fixed offset of -# 10000 will be used as usually. -# -# Example: -# -# cluster-announce-ip 10.1.1.5 -# cluster-announce-port 6379 -# cluster-announce-bus-port 6380 - -################################## SLOW LOG ################################### - -# The Redis Slow Log is a system to log queries that exceeded a specified -# execution time. The execution time does not include the I/O operations -# like talking with the client, sending the reply and so forth, -# but just the time needed to actually execute the command (this is the only -# stage of command execution where the thread is blocked and can not serve -# other requests in the meantime). -# -# You can configure the slow log with two parameters: one tells Redis -# what is the execution time, in microseconds, to exceed in order for the -# command to get logged, and the other parameter is the length of the -# slow log. When a new command is logged the oldest one is removed from the -# queue of logged commands. - -# The following time is expressed in microseconds, so 1000000 is equivalent -# to one second. Note that a negative number disables the slow log, while -# a value of zero forces the logging of every command. -slowlog-log-slower-than 10000 - -# There is no limit to this length. Just be aware that it will consume memory. -# You can reclaim memory used by the slow log with SLOWLOG RESET. -slowlog-max-len 128 - -################################ LATENCY MONITOR ############################## - -# The Redis latency monitoring subsystem samples different operations -# at runtime in order to collect data related to possible sources of -# latency of a Redis instance. -# -# Via the LATENCY command this information is available to the user that can -# print graphs and obtain reports. -# -# The system only logs operations that were performed in a time equal or -# greater than the amount of milliseconds specified via the -# latency-monitor-threshold configuration directive. When its value is set -# to zero, the latency monitor is turned off. -# -# By default latency monitoring is disabled since it is mostly not needed -# if you don't have latency issues, and collecting data has a performance -# impact, that while very small, can be measured under big load. Latency -# monitoring can easily be enabled at runtime using the command -# "CONFIG SET latency-monitor-threshold " if needed. 
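Both the slow log and the latency monitor described above can also be tuned at runtime rather than in this file. A small sketch with redis-py (illustrative only, not part of this diff; assumes a Redis instance on localhost:6379):

```python
import redis

r = redis.Redis(host="localhost", port=6379, db=0)

# Mirror the directives above at runtime: log commands slower than 10 ms
# (10,000 microseconds) and keep the last 128 entries.
r.config_set("slowlog-log-slower-than", 10000)
r.config_set("slowlog-max-len", 128)

# Turn the latency monitor on, as the comment above suggests (threshold in ms).
r.config_set("latency-monitor-threshold", 100)

# Inspect the ten most recent slow commands.
for entry in r.slowlog_get(10):
    print(entry["id"], entry["duration"], entry["command"])
```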
-latency-monitor-threshold 0 - -############################# EVENT NOTIFICATION ############################## - -# Redis can notify Pub/Sub clients about events happening in the key space. -# This feature is documented at http://redis.io/topics/notifications -# -# For instance if keyspace events notification is enabled, and a client -# performs a DEL operation on key "foo" stored in the Database 0, two -# messages will be published via Pub/Sub: -# -# PUBLISH __keyspace@0__:foo del -# PUBLISH __keyevent@0__:del foo -# -# It is possible to select the events that Redis will notify among a set -# of classes. Every class is identified by a single character: -# -# K Keyspace events, published with __keyspace@__ prefix. -# E Keyevent events, published with __keyevent@__ prefix. -# g Generic commands (non-type specific) like DEL, EXPIRE, RENAME, ... -# $ String commands -# l List commands -# s Set commands -# h Hash commands -# z Sorted set commands -# x Expired events (events generated every time a key expires) -# e Evicted events (events generated when a key is evicted for maxmemory) -# A Alias for g$lshzxe, so that the "AKE" string means all the events. -# -# The "notify-keyspace-events" takes as argument a string that is composed -# of zero or multiple characters. The empty string means that notifications -# are disabled. -# -# Example: to enable list and generic events, from the point of view of the -# event name, use: -# -# notify-keyspace-events Elg -# -# Example 2: to get the stream of the expired keys subscribing to channel -# name __keyevent@0__:expired use: -# -# notify-keyspace-events Ex -# -# By default all notifications are disabled because most users don't need -# this feature and the feature has some overhead. Note that if you don't -# specify at least one of K or E, no events will be delivered. -notify-keyspace-events "" - -############################### ADVANCED CONFIG ############################### - -# Hashes are encoded using a memory efficient data structure when they have a -# small number of entries, and the biggest entry does not exceed a given -# threshold. These thresholds can be configured using the following directives. -hash-max-ziplist-entries 512 -hash-max-ziplist-value 64 - -# Lists are also encoded in a special way to save a lot of space. -# The number of entries allowed per internal list node can be specified -# as a fixed maximum size or a maximum number of elements. -# For a fixed maximum size, use -5 through -1, meaning: -# -5: max size: 64 Kb <-- not recommended for normal workloads -# -4: max size: 32 Kb <-- not recommended -# -3: max size: 16 Kb <-- probably not recommended -# -2: max size: 8 Kb <-- good -# -1: max size: 4 Kb <-- good -# Positive numbers mean store up to _exactly_ that number of elements -# per list node. -# The highest performing option is usually -2 (8 Kb size) or -1 (4 Kb size), -# but if your use case is unique, adjust the settings as necessary. -list-max-ziplist-size -2 - -# Lists may also be compressed. -# Compress depth is the number of quicklist ziplist nodes from *each* side of -# the list to *exclude* from compression. The head and tail of the list -# are always uncompressed for fast push/pop operations. Settings are: -# 0: disable all list compression -# 1: depth 1 means "don't start compressing until after 1 node into the list, -# going from either the head or tail" -# So: [head]->node->node->...->node->[tail] -# [head], [tail] will always be uncompressed; inner nodes will compress. 
-# 2: [head]->[next]->node->node->...->node->[prev]->[tail] -# 2 here means: don't compress head or head->next or tail->prev or tail, -# but compress all nodes between them. -# 3: [head]->[next]->[next]->node->node->...->node->[prev]->[prev]->[tail] -# etc. -list-compress-depth 0 - -# Sets have a special encoding in just one case: when a set is composed -# of just strings that happen to be integers in radix 10 in the range -# of 64 bit signed integers. -# The following configuration setting sets the limit in the size of the -# set in order to use this special memory saving encoding. -set-max-intset-entries 512 - -# Similarly to hashes and lists, sorted sets are also specially encoded in -# order to save a lot of space. This encoding is only used when the length and -# elements of a sorted set are below the following limits: -zset-max-ziplist-entries 128 -zset-max-ziplist-value 64 - -# HyperLogLog sparse representation bytes limit. The limit includes the -# 16 bytes header. When an HyperLogLog using the sparse representation crosses -# this limit, it is converted into the dense representation. -# -# A value greater than 16000 is totally useless, since at that point the -# dense representation is more memory efficient. -# -# The suggested value is ~ 3000 in order to have the benefits of -# the space efficient encoding without slowing down too much PFADD, -# which is O(N) with the sparse encoding. The value can be raised to -# ~ 10000 when CPU is not a concern, but space is, and the data set is -# composed of many HyperLogLogs with cardinality in the 0 - 15000 range. -hll-sparse-max-bytes 3000 - -# Streams macro node max size / items. The stream data structure is a radix -# tree of big nodes that encode multiple items inside. Using this configuration -# it is possible to configure how big a single node can be in bytes, and the -# maximum number of items it may contain before switching to a new node when -# appending new stream entries. If any of the following settings are set to -# zero, the limit is ignored, so for instance it is possible to set just a -# max entires limit by setting max-bytes to 0 and max-entries to the desired -# value. -stream-node-max-bytes 4096 -stream-node-max-entries 100 - -# Active rehashing uses 1 millisecond every 100 milliseconds of CPU time in -# order to help rehashing the main Redis hash table (the one mapping top-level -# keys to values). The hash table implementation Redis uses (see dict.c) -# performs a lazy rehashing: the more operation you run into a hash table -# that is rehashing, the more rehashing "steps" are performed, so if the -# server is idle the rehashing is never complete and some more memory is used -# by the hash table. -# -# The default is to use this millisecond 10 times every second in order to -# actively rehash the main dictionaries, freeing memory when possible. -# -# If unsure: -# use "activerehashing no" if you have hard latency requirements and it is -# not a good thing in your environment that Redis can reply from time to time -# to queries with 2 milliseconds delay. -# -# use "activerehashing yes" if you don't have such hard requirements but -# want to free memory asap when possible. -activerehashing yes - -# The client output buffer limits can be used to force disconnection of clients -# that are not reading data from the server fast enough for some reason (a -# common reason is that a Pub/Sub client can't consume messages as fast as the -# publisher can produce them). 
-# -# The limit can be set differently for the three different classes of clients: -# -# normal -> normal clients including MONITOR clients -# replica -> replica clients -# pubsub -> clients subscribed to at least one pubsub channel or pattern -# -# The syntax of every client-output-buffer-limit directive is the following: -# -# client-output-buffer-limit -# -# A client is immediately disconnected once the hard limit is reached, or if -# the soft limit is reached and remains reached for the specified number of -# seconds (continuously). -# So for instance if the hard limit is 32 megabytes and the soft limit is -# 16 megabytes / 10 seconds, the client will get disconnected immediately -# if the size of the output buffers reach 32 megabytes, but will also get -# disconnected if the client reaches 16 megabytes and continuously overcomes -# the limit for 10 seconds. -# -# By default normal clients are not limited because they don't receive data -# without asking (in a push way), but just after a request, so only -# asynchronous clients may create a scenario where data is requested faster -# than it can read. -# -# Instead there is a default limit for pubsub and replica clients, since -# subscribers and replicas receive data in a push fashion. -# -# Both the hard or the soft limit can be disabled by setting them to zero. -client-output-buffer-limit normal 0 0 0 -client-output-buffer-limit replica 256mb 64mb 60 -client-output-buffer-limit pubsub 32mb 8mb 60 - -# Client query buffers accumulate new commands. They are limited to a fixed -# amount by default in order to avoid that a protocol desynchronization (for -# instance due to a bug in the client) will lead to unbound memory usage in -# the query buffer. However you can configure it here if you have very special -# needs, such us huge multi/exec requests or alike. -# -# client-query-buffer-limit 1gb - -# In the Redis protocol, bulk requests, that are, elements representing single -# strings, are normally limited ot 512 mb. However you can change this limit -# here. -# -# proto-max-bulk-len 512mb - -# Redis calls an internal function to perform many background tasks, like -# closing connections of clients in timeout, purging expired keys that are -# never requested, and so forth. -# -# Not all tasks are performed with the same frequency, but Redis checks for -# tasks to perform according to the specified "hz" value. -# -# By default "hz" is set to 10. Raising the value will use more CPU when -# Redis is idle, but at the same time will make Redis more responsive when -# there are many keys expiring at the same time, and timeouts may be -# handled with more precision. -# -# The range is between 1 and 500, however a value over 100 is usually not -# a good idea. Most users should use the default of 10 and raise this up to -# 100 only in environments where very low latency is required. -hz 10 - -# Normally it is useful to have an HZ value which is proportional to the -# number of clients connected. This is useful in order, for instance, to -# avoid too many clients are processed for each background task invocation -# in order to avoid latency spikes. -# -# Since the default HZ value by default is conservatively set to 10, Redis -# offers, and enables by default, the ability to use an adaptive HZ value -# which will temporary raise when there are many connected clients. 
-# -# When dynamic HZ is enabled, the actual configured HZ will be used as -# as a baseline, but multiples of the configured HZ value will be actually -# used as needed once more clients are connected. In this way an idle -# instance will use very little CPU time while a busy instance will be -# more responsive. -dynamic-hz yes - -# When a child rewrites the AOF file, if the following option is enabled -# the file will be fsync-ed every 32 MB of data generated. This is useful -# in order to commit the file to the disk more incrementally and avoid -# big latency spikes. -aof-rewrite-incremental-fsync yes - -# When redis saves RDB file, if the following option is enabled -# the file will be fsync-ed every 32 MB of data generated. This is useful -# in order to commit the file to the disk more incrementally and avoid -# big latency spikes. -rdb-save-incremental-fsync yes - -# Redis LFU eviction (see maxmemory setting) can be tuned. However it is a good -# idea to start with the default settings and only change them after investigating -# how to improve the performances and how the keys LFU change over time, which -# is possible to inspect via the OBJECT FREQ command. -# -# There are two tunable parameters in the Redis LFU implementation: the -# counter logarithm factor and the counter decay time. It is important to -# understand what the two parameters mean before changing them. -# -# The LFU counter is just 8 bits per key, it's maximum value is 255, so Redis -# uses a probabilistic increment with logarithmic behavior. Given the value -# of the old counter, when a key is accessed, the counter is incremented in -# this way: -# -# 1. A random number R between 0 and 1 is extracted. -# 2. A probability P is calculated as 1/(old_value*lfu_log_factor+1). -# 3. The counter is incremented only if R < P. -# -# The default lfu-log-factor is 10. This is a table of how the frequency -# counter changes with a different number of accesses with different -# logarithmic factors: -# -# +--------+------------+------------+------------+------------+------------+ -# | factor | 100 hits | 1000 hits | 100K hits | 1M hits | 10M hits | -# +--------+------------+------------+------------+------------+------------+ -# | 0 | 104 | 255 | 255 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 1 | 18 | 49 | 255 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 10 | 10 | 18 | 142 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 100 | 8 | 11 | 49 | 143 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# -# NOTE: The above table was obtained by running the following commands: -# -# redis-benchmark -n 1000000 incr foo -# redis-cli object freq foo -# -# NOTE 2: The counter initial value is 5 in order to give new objects a chance -# to accumulate hits. -# -# The counter decay time is the time, in minutes, that must elapse in order -# for the key counter to be divided by two (or decremented if it has a value -# less <= 10). -# -# The default value for the lfu-decay-time is 1. A Special value of 0 means to -# decay the counter every time it happens to be scanned. -# -# lfu-log-factor 10 -# lfu-decay-time 1 - -########################### ACTIVE DEFRAGMENTATION ####################### -# -# WARNING THIS FEATURE IS EXPERIMENTAL. However it was stress tested -# even in production and manually tested by multiple engineers for some -# time. 
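The probabilistic LFU counter described a few paragraphs above (increment with probability P = 1/(old_value * lfu_log_factor + 1), 8-bit counter, new keys starting at 5) is easy to simulate. A toy Python sketch (illustrative only, not part of this diff; it follows the simplified rule in the comment, so it will not reproduce the table above exactly):

```python
import random


def simulate_lfu_counter(hits: int, lfu_log_factor: int = 10, start: int = 5) -> int:
    """Simulate the LFU counter of a single key after `hits` accesses."""
    counter = start
    for _ in range(hits):
        if counter >= 255:          # the counter is a single byte, so it saturates
            break
        p = 1.0 / (counter * lfu_log_factor + 1)
        if random.random() < p:     # increment only if R < P
            counter += 1
    return counter


for hits in (100, 1_000, 100_000):
    print(hits, simulate_lfu_counter(hits))
```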
-# -# What is active defragmentation? -# ------------------------------- -# -# Active (online) defragmentation allows a Redis server to compact the -# spaces left between small allocations and deallocations of data in memory, -# thus allowing to reclaim back memory. -# -# Fragmentation is a natural process that happens with every allocator (but -# less so with Jemalloc, fortunately) and certain workloads. Normally a server -# restart is needed in order to lower the fragmentation, or at least to flush -# away all the data and create it again. However thanks to this feature -# implemented by Oran Agra for Redis 4.0 this process can happen at runtime -# in an "hot" way, while the server is running. -# -# Basically when the fragmentation is over a certain level (see the -# configuration options below) Redis will start to create new copies of the -# values in contiguous memory regions by exploiting certain specific Jemalloc -# features (in order to understand if an allocation is causing fragmentation -# and to allocate it in a better place), and at the same time, will release the -# old copies of the data. This process, repeated incrementally for all the keys -# will cause the fragmentation to drop back to normal values. -# -# Important things to understand: -# -# 1. This feature is disabled by default, and only works if you compiled Redis -# to use the copy of Jemalloc we ship with the source code of Redis. -# This is the default with Linux builds. -# -# 2. You never need to enable this feature if you don't have fragmentation -# issues. -# -# 3. Once you experience fragmentation, you can enable this feature when -# needed with the command "CONFIG SET activedefrag yes". -# -# The configuration parameters are able to fine tune the behavior of the -# defragmentation process. If you are not sure about what they mean it is -# a good idea to leave the defaults untouched. - -# Enabled active defragmentation -# activedefrag yes - -# Minimum amount of fragmentation waste to start active defrag -# active-defrag-ignore-bytes 100mb - -# Minimum percentage of fragmentation to start active defrag -# active-defrag-threshold-lower 10 - -# Maximum percentage of fragmentation at which we use maximum effort -# active-defrag-threshold-upper 100 - -# Minimal effort for defrag in CPU percentage -# active-defrag-cycle-min 5 - -# Maximal effort for defrag in CPU percentage -# active-defrag-cycle-max 75 - -# Maximum number of set/hash/zset/list fields that will be processed from -# the main dictionary scan -# active-defrag-max-scan-fields 1000 - -# It is possible to pin different threads and processes of Redis to specific -# CPUs in your system, in order to maximize the performances of the server. -# This is useful both in order to pin different Redis threads in different -# CPUs, but also in order to make sure that multiple Redis instances running -# in the same host will be pinned to different CPUs. -# -# Normally you can do this using the "taskset" command, however it is also -# possible to this via Redis configuration directly, both in Linux and FreeBSD. -# -# You can pin the server/IO threads, bio threads, aof rewrite child process, and -# the bgsave child process. 
The syntax to specify the cpu list is the same as -# the taskset command: -# -# Set redis server/io threads to cpu affinity 0,2,4,6: -# server_cpulist 0-7:2 -# -# Set bio threads to cpu affinity 1,3: -# bio_cpulist 1,3 -# -# Set aof rewrite child process to cpu affinity 8,9,10,11: -# aof_rewrite_cpulist 8-11 -# -# Set bgsave child process to cpu affinity 1,10,11 -# bgsave_cpulist 1,10-11 - -# In some cases redis will emit warnings and even refuse to start if it detects -# that the system is in bad state, it is possible to suppress these warnings -# by setting the following config which takes a space delimited list of warnings -# to suppress -# -# ignore-warnings ARM64-COW-BUG diff --git a/docker/conf/uwsgi.ini b/docker/conf/uwsgi.ini index e7f505d6..db0d8732 100644 --- a/docker/conf/uwsgi.ini +++ b/docker/conf/uwsgi.ini @@ -1,7 +1,7 @@ [uwsgi] -http-socket=0.0.0.0:8000 -env=PYTHONPATH=/code/src +http-socket=0.0.0.0:8888 +virtualenv=/venv enable-threads=0 honour-range=1 master=1 diff --git a/docs/_hooks/hooks.py b/docs/_hooks/hooks.py new file mode 100644 index 00000000..0f9f746a --- /dev/null +++ b/docs/_hooks/hooks.py @@ -0,0 +1,24 @@ +# mypy: disable-error-code="typeddict-unknown-key" +from datetime import datetime + +from mkdocs.config.defaults import MkDocsConfig +from mkdocs.structure.files import Files +from mkdocs.structure.nav import Navigation +from mkdocs.structure.pages import Page +from mkdocs.utils.templates import TemplateContext + + +def on_pre_build(config: MkDocsConfig) -> None: + pass + + +def on_page_markdown( + markdown: str, page: Page, config: MkDocsConfig, files: Files +) -> None: + pass + + +def on_page_context( + context: TemplateContext, nav: Navigation, page: Page, config: MkDocsConfig +) -> None: + context["build_date"] = datetime.now().strftime("%a, %d, %b %Y - %H:%M") diff --git a/docs/_scripts/get_settings.py b/docs/_scripts/get_settings.py new file mode 100644 index 00000000..959957dd --- /dev/null +++ b/docs/_scripts/get_settings.py @@ -0,0 +1,40 @@ +import mkdocs_gen_files + +from aurora.config import env + +MD_HEADER = """# Setttings + +""" +MD_LINE = """ +### {key} +_Default_: `{default_value}` + +{help} + +""" +DEV_LINE = """ +__Suggested value for development__: `{develop_value}` +""" + +OUTFILE = "settings.md" +with mkdocs_gen_files.open(OUTFILE, "w") as f: + f.write(MD_HEADER) + for entry, cfg in sorted(env.config.items()): + f.write( + MD_LINE.format( + key=entry, + default_value=cfg["default"], + develop_value=env.get_develop_value(entry), + help=cfg["help"], + ) + ) + if env.get_develop_value(entry): + f.write( + DEV_LINE.format( + key=entry, + default_value=cfg["default"], + develop_value=env.get_develop_value(entry), + help=cfg["help"], + ) + ) +mkdocs_gen_files.set_edit_path(OUTFILE, "get_settings.py") diff --git a/docs/_theme/css/style.css b/docs/_theme/css/style.css new file mode 100644 index 00000000..b29ac332 --- /dev/null +++ b/docs/_theme/css/style.css @@ -0,0 +1,15 @@ +.align-center { + align-content: center; + text-align: center; + width: 100%; +} + +.md-typeset__table { + width: 100%; +} + +.md-typeset__table table:not([class]) { + display: table; +} + +/*# sourceMappingURL=style.css.map */ diff --git a/docs/_theme/css/style.css.map b/docs/_theme/css/style.css.map new file mode 100644 index 00000000..fa5bb018 --- /dev/null +++ b/docs/_theme/css/style.css.map @@ -0,0 +1 @@ +{"version":3,"sourceRoot":"","sources":["style.scss"],"names":[],"mappings":"AAAA;EACE;EACA;EACA;;;AAEF;EACE;;;AAGF;EACE","file":"style.css"} diff --git 
a/docs/_theme/css/style.scss b/docs/_theme/css/style.scss new file mode 100644 index 00000000..ae341fe1 --- /dev/null +++ b/docs/_theme/css/style.scss @@ -0,0 +1,12 @@ +.align-center{ + align-content: center; + text-align: center; + width: 100%; +} +.md-typeset__table { + width: 100%; +} + +.md-typeset__table table:not([class]) { + display: table +} diff --git a/docs/_theme/js/address.js b/docs/_theme/js/address.js new file mode 100644 index 00000000..58e96528 --- /dev/null +++ b/docs/_theme/js/address.js @@ -0,0 +1,50 @@ +const clickHandler = function () { + let currentAddr = Cookies.get('address') || "https://127.0.0.1/"; + let addr = prompt("Set your HOPE server address", currentAddr); + Cookies.set('address', addr, currentAddr); + location.reload(); +}; +const setAddress = function () { + let cookieAddr = Cookies.get('address'); + if (!cookieAddr) { + cookieAddr = "[SERVER_ADDRESS]" + } + for (const cell of document.getElementsByTagName('code')) { + cell.innerHTML = cell.innerHTML.replace('[SERVER_ADDRESS]', cookieAddr); + } +}; +// addEventListener('click', function(e) { +// setTimeout(setAddress, 500); +// }) +addEventListener('load', function (e) { + setAddress(); + let btn = document.getElementById("set-address"); + if (btn) { + btn.addEventListener('click', clickHandler); + } +}); + +var open = window.XMLHttpRequest.prototype.open, + send = window.XMLHttpRequest.prototype.send, onReadyStateChange; + +function sendReplacement(data) { + console.warn('Sending HTTP request data : ', data); + + if (this.onreadystatechange) { + this._onreadystatechange = this.onreadystatechange; + } + this.onreadystatechange = onReadyStateChangeReplacement; + return send.apply(this, arguments); +} + +function onReadyStateChangeReplacement() { + if (this.readyState === XMLHttpRequest.DONE) { + setTimeout(setAddress, 100); + } + if (this._onreadystatechange) { + return this._onreadystatechange.apply(this, arguments); + } + +} + +window.XMLHttpRequest.prototype.send = sendReplacement; diff --git a/docs/_theme/js/js.cookie.js b/docs/_theme/js/js.cookie.js new file mode 100644 index 00000000..eac68211 --- /dev/null +++ b/docs/_theme/js/js.cookie.js @@ -0,0 +1,59 @@ +/*! js-cookie v3.0.5 | MIT */ +!function (e, t) { + "object" == typeof exports && "undefined" != typeof module ? module.exports = t() : "function" == typeof define && define.amd ? define(t) : (e = "undefined" != typeof globalThis ? globalThis : e || self, function () { + var n = e.Cookies, o = e.Cookies = t(); + o.noConflict = function () { + return e.Cookies = n, o + } + }()) +}(this, (function () { + "use strict"; + + function e(e) { + for (var t = 1; t < arguments.length; t++) { + var n = arguments[t]; + for (var o in n) e[o] = n[o] + } + return e + } + + var t = function t(n, o) { + function r(t, r, i) { + if ("undefined" != typeof document) { + "number" == typeof (i = e({}, o, i)).expires && (i.expires = new Date(Date.now() + 864e5 * i.expires)), i.expires && (i.expires = i.expires.toUTCString()), t = encodeURIComponent(t).replace(/%(2[346B]|5E|60|7C)/g, decodeURIComponent).replace(/[()]/g, escape); + var c = ""; + for (var u in i) i[u] && (c += "; " + u, !0 !== i[u] && (c += "=" + i[u].split(";")[0])); + return document.cookie = t + "=" + n.write(r, t) + c + } + } + + return Object.create({ + set: r, get: function (e) { + if ("undefined" != typeof document && (!arguments.length || e)) { + for (var t = document.cookie ? 
document.cookie.split("; ") : [], o = {}, r = 0; r < t.length; r++) { + var i = t[r].split("="), c = i.slice(1).join("="); + try { + var u = decodeURIComponent(i[0]); + if (o[u] = n.read(c, u), e === u) break + } catch (e) { + } + } + return e ? o[e] : o + } + }, remove: function (t, n) { + r(t, "", e({}, n, {expires: -1})) + }, withAttributes: function (n) { + return t(this.converter, e({}, this.attributes, n)) + }, withConverter: function (n) { + return t(e({}, this.converter, n), this.attributes) + } + }, {attributes: {value: Object.freeze(o)}, converter: {value: Object.freeze(n)}}) + }({ + read: function (e) { + return '"' === e[0] && (e = e.slice(1, -1)), e.replace(/(%[\dA-F]{2})+/gi, decodeURIComponent) + }, write: function (e) { + return encodeURIComponent(e).replace(/%(2[346BF]|3[AC-F]|40|5[BDE]|60|7[BCD])/g, decodeURIComponent) + } + }, {path: "/"}); + return t +})); diff --git a/docs/_theme/overrides/term.html b/docs/_theme/overrides/term.html new file mode 100644 index 00000000..d0be1ed0 --- /dev/null +++ b/docs/_theme/overrides/term.html @@ -0,0 +1,5 @@ +{% extends "base.html" %} +{%- block content %} +
glossary / {{ page.title }}
+    {{ super() }}
+{%- endblock %}
diff --git a/docs/src/.gitignore b/docs/src/.gitignore
new file mode 100644
index 00000000..65f559bf
--- /dev/null
+++ b/docs/src/.gitignore
@@ -0,0 +1,3 @@
+!**/.pages
+!.includes
+_theme/.templates
diff --git a/docs/src/.pages b/docs/src/.pages
new file mode 100644
index 00000000..073f4793
--- /dev/null
+++ b/docs/src/.pages
@@ -0,0 +1,5 @@
+nav:
+  - Home: index.md
+  - contributing.md
+  - run
+  - settings.md
diff --git a/docs/src/contributing.md b/docs/src/contributing.md
new file mode 100644
index 00000000..b69a44bd
--- /dev/null
+++ b/docs/src/contributing.md
@@ -0,0 +1,55 @@
+# Contributing
+
+
+Install [uv](https://docs.astral.sh/uv/)
+
+
+    git clone https://github.com/unicef/hope-aurora
+    uv venv .venv --python 3.12
+    source .venv/bin/activate
+    uv sync --all-extras
+    pre-commit install --hook-type pre-commit --hook-type pre-push
+
+
+## Run tests
+
+    pytest tests
+
+## Run only the Selenium tests
+
+    pytest tests -m selenium
+
+
+## Run all tests, including Selenium
+
+    pytest tests --selenium
+
+
+!!! note
+
+    You can disable Selenium headless mode (and show the browser activity on the screen) using the `--show-browser` flag
+
+
+
+
+## Run local server
+
+
+    ./manage.py runserver
+
+
+!!! note
+
+    To facilitate development you can use:
+
+        export AUTHENTICATION_BACKENDS="aurora.security.backends.AnyUserAuthBackend"
+
+    It works only if `DEBUG=True`
+
+
+
+## Docker compose
+
+Alternatively, you can use the provided docker compose setup for development:
+
+    docker compose up
diff --git a/docs/src/docker.md b/docs/src/docker.md
new file mode 100644
index 00000000..1b94a3de
--- /dev/null
+++ b/docs/src/docker.md
@@ -0,0 +1,19 @@
+# Build and use your Docker image
+
+After you have cloned the repo, be sure to have Redis and PostgreSQL servers running on your machine:
+
+    export ADMIN_EMAIL=admin@example.com
+    export ADMIN_PASSWORD=password
+    export DATABASE_URL=postgres://postgres:@127.0.0.1:5432/aurora
+    export CACHE_URL=redis://127.0.0.1:6379/1?client_class=django_redis.client.DefaultClient
+
+    cd docker
+
+    make build run
+
+
+## Use provided compose.yml
+
+    docker compose up
+
+Navigate to http://localhost:8000/admin/ and log in using `admin@example.com` / `password`
diff --git a/docs/src/img/Screenshot 2024-04-14 at 10.25.01.png b/docs/src/img/Screenshot 2024-04-14 at 10.25.01.png
new file mode 100644
index 00000000..de26638f
Binary files /dev/null and b/docs/src/img/Screenshot 2024-04-14 at 10.25.01.png differ
diff --git a/docs/src/img/favicon.ico b/docs/src/img/favicon.ico
new file mode 100644
index 00000000..27ff3f19
Binary files /dev/null and b/docs/src/img/favicon.ico differ
diff --git a/docs/src/img/logo.png b/docs/src/img/logo.png
new file mode 100644
index 00000000..c799e945
Binary files /dev/null and b/docs/src/img/logo.png differ
diff --git a/docs/src/img/logo2.png b/docs/src/img/logo2.png
new file mode 100644
index 00000000..0b88a181
Binary files /dev/null and b/docs/src/img/logo2.png differ
diff --git a/docs/src/img/name.png b/docs/src/img/name.png
new file mode 100644
index 00000000..e9c8dfa5
Binary files /dev/null and b/docs/src/img/name.png differ
diff --git a/docs/src/index.md b/docs/src/index.md
new file mode 100644
index 00000000..d12b7be3
--- /dev/null
+++ b/docs/src/index.md
@@ -0,0 +1,19 @@
+---
+title: Documentation
+---
+
+Aurora is the official HOPE online registration tool. It is released as an open source project; feel free to contribute and use it.
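The contributing notes above point `AUTHENTICATION_BACKENDS` at `aurora.security.backends.AnyUserAuthBackend`, which is not shown in this diff. As a rough, hypothetical sketch of what such a DEBUG-only "let anyone in" backend can look like in Django (our own assumptions, not the project's actual implementation):

```python
# Hypothetical sketch of a development-only authentication backend; the real
# aurora.security.backends.AnyUserAuthBackend is not in this diff and may differ.
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import BaseBackend


class AnyUserAuthBackend(BaseBackend):
    def authenticate(self, request, username=None, password=None, **kwargs):
        if not settings.DEBUG or not username:
            return None  # never authenticate blindly outside local development
        user, _ = get_user_model().objects.get_or_create(
            username=username,
            defaults={"is_staff": True},  # illustrative default, our assumption
        )
        return user

    def get_user(self, user_id):
        user_model = get_user_model()
        try:
            return user_model.objects.get(pk=user_id)
        except user_model.DoesNotExist:
            return None
```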
+
+It has been developed with performance and security as the main key points.
+
+The strengths of Aurora are:
+
+- Incredibly fast (reached 15,000 registrations per second)
+- Security (supports both
+- Powerful form builder
+- Supports complex validation logic
+- Database-level, user-defined, per-form unique keys
+- Multilanguage
+- Open and protected online registrations
+- Straightforward architecture
+- Custom form page (partner logos, header, footer, colors...)
diff --git a/docs/src/run/.pages b/docs/src/run/.pages
new file mode 100644
index 00000000..7a35c547
--- /dev/null
+++ b/docs/src/run/.pages
@@ -0,0 +1,3 @@
+nav:
+  - Development: dev.md
+  - Config: config.md
diff --git a/docs/src/run/config.md b/docs/src/run/config.md
new file mode 100644
index 00000000..64e4c7a1
--- /dev/null
+++ b/docs/src/run/config.md
@@ -0,0 +1,7 @@
+# Set up HOPE integration
+
+- Add the `aurora_token` to the user
+- Add the `aurora_server` in the Constance Config
+- Fetch data from Aurora
+- Associate Organizations to Business Areas
+- Associate Projects to Programmes
diff --git a/docs/src/run/dev.md b/docs/src/run/dev.md
new file mode 100644
index 00000000..96b95f96
--- /dev/null
+++ b/docs/src/run/dev.md
@@ -0,0 +1,26 @@
+# Run Development Version
+
+!!! warning
+
+    This is an insecure development configuration.
+    DO NOT USE IN PRODUCTION.
+
+
+To locally run the latest development (not officially released) version, simply:
+
+    docker run \
+        --rm \
+        -p 8000:8000 \
+        -e ADMIN_EMAIL="${ADMIN_EMAIL}" \
+        -e ADMIN_PASSWORD="${ADMIN_PASSWORD}" \
+        -e ALLOWED_HOSTS="*" \
+        -e CACHE_URL="redis://[REDIS_SERVER]:[PORT]/0" \
+        -e CSRF_COOKIE_SECURE=False \
+        -e CSRF_TRUSTED_ORIGINS=http://localhost \
+        -e DATABASE_URL="${DATABASE_URL}" \
+        -e DEBUG="1" \
+        -e DJANGO_SETTINGS_MODULE=aurora.config.settings \
+        -e LOGGING_LEVEL="DEBUG" \
+        -e SECRET_KEY=${SECRET_KEY} \
+        -e SENTRY_DSN="${SENTRY_DSN}" \
+        unicef/hope-aurora:develop
diff --git a/exorcist.py b/exorcist.py
deleted file mode 100644
index d7a87416..00000000
--- a/exorcist.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from time import sleep, time
-
-import requests
-import sys
-
-
-class COLORS:
-    HEADER = "\033[95m"
-    OKBLUE = "\033[94m"
-    OKCYAN = "\033[96m"
-    SUCCESS = "\033[92m"
-    WARNING = "\033[93m"
-    FAIL = "\033[91m"
-    RESET = "\033[0m"
-    BOLD = "\033[1m"
-    UNDERLINE = "\033[4m"
-    MARK = "\xE2\x9C\x94"
-    MARK1 = "\u2713"
-    MARK2 = "\u2714\u274c"
-    MARK3 = "\N{check mark}"
-    MARK4 = "✓"
-    PY = "\U0001F40D"
-    CHECK = "\N{BALLOT BOX WITH CHECK}"
-    UNCHECK = "\N{BALLOT BOX}"
-
-
-if __name__ == "__main__":
-    if "--random" in sys.argv:
-        rnd = time
-    else:
-        rnd = lambda: ""
-
-    if len(sys.argv) == 1:
-        urls = ["https://register.unicef.org/"]
-    else:
-        urls = [u for u in sys.argv[1:] if u.startswith("http")]
-
-    latest_ref = {}
-    latest_ver = {}
-    lastest_version = None
-    while True:
-        seed = rnd()
-        for url in urls:
-            ret = requests.get(f"{url}?{seed}", headers={"User-Agent": "Exorcist/1.0 "})
-            ver = ret.headers.get("X-Aurora-Version", "N/A")
-            if lastest_version is not None:
-                if ver != lastest_version:
-                    marker = COLORS.WARNING
-                    if ver != latest_ver.get(url):
-                        marker = COLORS.FAIL
-            else:
-                marker = COLORS.RESET
-            print(
-                f"{marker}...{url[-20:]} - {ret.status_code} - "
-                f"{ver:<7} - "
-                f"{ret.headers.get('X-Aurora-Build', 'N/A'):<16} - "
-                f"{ret.headers.get('X-Aurora-Time', 'N/A')} - "
-                f"{ret.headers.get('ETag', 'N/A')[:20]}{COLORS.RESET}"
-            )
-            lastest_version = latest_ver[url] = ver
-        if len(urls) > 1:
-            print("=====")
-
-        sleep(1)
diff --git a/mkdocs.yaml b/mkdocs.yaml new
file mode 100644 index 00000000..5cca22f7 --- /dev/null +++ b/mkdocs.yaml @@ -0,0 +1,83 @@ +copyright: Copyright © 2020-2024 UNICEF. +dev_addr: 127.0.0.1:8001 +docs_dir: docs/src +edit_uri: 'blob/develop/docs/' +repo_url: https://github.com/unicef/hope-aurora +site_author: HOPE Team +site_description: "" +site_dir: ./~build/docs +site_name: Aurora +site_url: https://unicef.github.io/unicef/hope-country-workspace/ +strict: false +exclude_docs: | + _docs/* + _hooks/* + _includes/* + _scripts/* + ~* + +markdown_extensions: + - admonition + - pymdownx.magiclink + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + +theme: + name: "material" + color_mode: auto + custom_dir: docs/_theme/overrides + favicon: img/favicon.ico + logo: img/logo.png +# highlightjs: true +# hljs_languages: +# - yaml +# - django + user_color_mode_toggle: true + features: + - content.action.edit + - content.code.annotate + - content.code.copy + - content.tooltips + - header.autohidex + - navigation.footer + - navigation.indexes + - navigation.instant + - navigation.instant.prefetch + - navigation.instant.progress + extra: + version: + provider: mike + alias: true + palette: + # Palette toggle for light mode + - scheme: default + primary: light blue + media: "(prefers-color-scheme: light)" + toggle: + icon: material/weather-sunny + name: Switch to dark mode + + # Palette toggle for dark mode + - scheme: slate + primary: light blue + media: "(prefers-color-scheme: dark)" + toggle: + icon: material/weather-night + name: Switch to light mode + +plugins: + - mkdocstrings: + default_handler: python + - awesome-pages + - search + - gen-files: + scripts: + - docs/_scripts/get_settings.py +watch: + - src/ + +hooks: + - docs/_hooks/hooks.py diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 45228c98..00000000 --- a/poetry.lock +++ /dev/null @@ -1,3977 +0,0 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. - -[[package]] -name = "amqp" -version = "5.1.1" -description = "Low-level AMQP client for Python (fork of amqplib)." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, - {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, -] - -[package.dependencies] -vine = ">=5.0.0" - -[[package]] -name = "appnope" -version = "0.1.3" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = "*" -files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] - -[[package]] -name = "asgiref" -version = "3.7.2" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.7" -files = [ - {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, - {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - -[[package]] -name = "asttokens" -version = "2.2.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, -] - -[package.dependencies] -six = "*" - -[package.extras] -test = ["astroid", "pytest"] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] - -[[package]] -name = "autobahn" -version = "23.6.2" -description = "WebSocket client & server library, WAMP real-time framework" -optional = false -python-versions = ">=3.9" -files = [ - {file = "autobahn-23.6.2.tar.gz", hash = "sha256:ec9421c52a2103364d1ef0468036e6019ee84f71721e86b36fe19ad6966c1181"}, -] - -[package.dependencies] -cryptography = ">=3.4.6" -hyperlink = ">=21.0.0" -setuptools = "*" -txaio = ">=21.2.1" - -[package.extras] -all = ["PyGObject (>=3.40.0)", "argon2_cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service_identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "ujson (>=4.0.2)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] -compress = ["python-snappy (>=0.6.0)"] -dev = ["backports.tempfile (>=1.0)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx_rtd_theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] -encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service_identity (>=18.1.0)"] -nvx = ["cffi (>=1.14.5)"] -scram = ["argon2_cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] -serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "ujson (>=4.0.2)"] -twisted = ["attrs (>=20.3.0)", "twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] -ui = ["PyGObject (>=3.40.0)"] -xbr = ["base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", 
"ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] - -[[package]] -name = "automat" -version = "22.10.0" -description = "Self-service finite-state machines for the programmer on the go." -optional = false -python-versions = "*" -files = [ - {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, - {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -six = "*" - -[package.extras] -visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - -[[package]] -name = "beautifulsoup4" -version = "4.12.2" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "billiard" -version = "4.1.0" -description = "Python multiprocessing fork with improvements and bugfixes" -optional = false -python-versions = ">=3.7" -files = [ - {file = "billiard-4.1.0-py3-none-any.whl", hash = "sha256:0f50d6be051c6b2b75bfbc8bfd85af195c5739c281d3f5b86a5640c65563614a"}, - {file = "billiard-4.1.0.tar.gz", hash = "sha256:1ad2eeae8e28053d729ba3373d34d9d6e210f6e4d8bf0a9c64f92bd053f1edf5"}, -] - -[[package]] -name = "black" -version = "22.12.0" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cachetools" -version = "5.3.1" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, -] - -[[package]] -name = "celery" -version = "5.3.1" -description = "Distributed Task Queue." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "celery-5.3.1-py3-none-any.whl", hash = "sha256:27f8f3f3b58de6e0ab4f174791383bbd7445aff0471a43e99cfd77727940753f"}, - {file = "celery-5.3.1.tar.gz", hash = "sha256:f84d1c21a1520c116c2b7d26593926581191435a03aa74b77c941b93ca1c6210"}, -] - -[package.dependencies] -billiard = ">=4.1.0,<5.0" -click = ">=8.1.2,<9.0" -click-didyoumean = ">=0.3.0" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.3.1,<6.0" -python-dateutil = ">=2.8.2" -tzdata = ">=2022.7" -vine = ">=5.0.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=2.0.1)"] -auth = ["cryptography (==41.0.1)"] -azureblockblob = ["azure-storage-blob (>=12.15.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (>=3.25.0,<4)"] -consul = ["python-consul2 (==0.1.5)"] -cosmosdbsql = ["pydocumentdb (==2.3.5)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb (==1.14.2)"] -django = ["Django (>=2.2.28)"] -dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elasticsearch (<8.0)"] -eventlet = ["eventlet (>=0.32.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -memcache = ["pylibmc (==1.6.3)"] -mongodb = ["pymongo[srv] (>=4.0.2)"] -msgpack = ["msgpack (==1.0.5)"] -pymemcache = ["python-memcached (==1.59)"] -pyro = ["pyro4 (==4.82)"] -pytest = ["pytest-celery (==0.0.0)"] -redis = ["redis (>=4.5.2,!=4.5.5)"] -s3 = ["boto3 (>=1.26.143)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem (==4.1.4)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = ["zstandard (==0.21.0)"] - -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = "*" -files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - 
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - 
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "channels" -version = "4.0.0" -description = "Brings async, event-driven capabilities to Django 3.2 and up." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "channels-4.0.0-py3-none-any.whl", hash = "sha256:2253334ac76f67cba68c2072273f7e0e67dbdac77eeb7e318f511d2f9a53c5e4"}, - {file = "channels-4.0.0.tar.gz", hash = "sha256:0ce53507a7da7b148eaa454526e0e05f7da5e5d1c23440e4886cf146981d8420"}, -] - -[package.dependencies] -asgiref = ">=3.5.0,<4" -daphne = {version = ">=4.0.0", optional = true, markers = "extra == \"daphne\""} -Django = ">=3.2" - -[package.extras] -daphne = ["daphne (>=4.0.0)"] -tests = ["async-timeout", "coverage (>=4.5,<5.0)", "pytest", "pytest-asyncio", "pytest-django"] - -[[package]] -name = "channels-redis" -version = "4.1.0" -description = "Redis-backed ASGI channel layer implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "channels_redis-4.1.0-py3-none-any.whl", hash = "sha256:3696f5b9fe367ea495d402ba83d7c3c99e8ca0e1354ff8d913535976ed0abf73"}, - {file = "channels_redis-4.1.0.tar.gz", hash = "sha256:6bd4f75f4ab4a7db17cee495593ace886d7e914c66f8214a1f247ff6659c073a"}, -] - -[package.dependencies] -asgiref = ">=3.2.10,<4" -channels = "*" -msgpack = ">=1.0,<2.0" -redis = ">=4.5.3" - -[package.extras] -cryptography = ["cryptography (>=1.3.0)"] -tests = ["async-timeout", "cryptography (>=1.3.0)", "pytest", "pytest-asyncio", "pytest-timeout"] - -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.2.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] - -[[package]] -name = "click" -version = "8.1.6" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click-didyoumean" -version = "0.3.0" -description = "Enables git-like *did-you-mean* feature in click" -optional = false -python-versions = ">=3.6.2,<4.0.0" -files = [ - {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, - {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, -] - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
-optional = false -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.3.0" -description = "REPL plugin for Click" -optional = false -python-versions = ">=3.6" -files = [ - {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, - {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, -] - -[package.dependencies] -click = ">=7.0" -prompt-toolkit = ">=3.0.36" - -[package.extras] -testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "constantly" -version = "15.1.0" -description = "Symbolic constants in Python" -optional = false -python-versions = "*" -files = [ - {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, - {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, -] - -[[package]] -name = "coverage" -version = "7.3.0" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, - {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, - {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, - {file = 
"coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, - {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, - {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, - {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, - {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, - {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, - {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, - {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, - {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, - {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, - {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, - {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, - {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, - {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, - {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, - {file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, - {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, - {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, - {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cryptography" -version = "41.0.3" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = 
"sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "cssselect" -version = "1.2.0" -description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, - {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, -] - -[[package]] -name = "daphne" -version = "4.0.0" -description = "Django ASGI (HTTP/WebSocket) server" -optional = false -python-versions = ">=3.7" -files = [ - {file = "daphne-4.0.0-py3-none-any.whl", hash = "sha256:a288ece46012b6b719c37150be67c69ebfca0793a8521bf821533bad983179b2"}, - {file = "daphne-4.0.0.tar.gz", hash = "sha256:cce9afc8f49a4f15d4270b8cfb0e0fe811b770a5cc795474e97e4da287497666"}, -] - -[package.dependencies] -asgiref = ">=3.5.2,<4" -autobahn = ">=22.4.2" -twisted = {version = ">=22.4", extras = ["tls"]} - -[package.extras] -tests = ["django", "hypothesis", "pytest", "pytest-asyncio"] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "diff-match-patch" -version = "20230430" -description = "Diff Match and Patch" -optional = false -python-versions = ">=3.7" -files = [ - {file = "diff-match-patch-20230430.tar.gz", hash = "sha256:953019cdb9c9d2c9e47b5b12bcff3cf4746fc4598eb406076fa1fc27e6a1f15c"}, - {file = "diff_match_patch-20230430-py3-none-any.whl", hash = "sha256:dce43505fb7b1b317de7195579388df0746d90db07015ed47a85e5e44930ef93"}, -] - -[package.extras] -dev = ["attribution (==1.6.2)", "black (==23.3.0)", "flit (==3.8.0)", "mypy (==1.2.0)", "ufmt (==2.1.0)", "usort (==1.0.6)"] - -[[package]] -name = "distlib" -version = "0.3.7" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, -] - -[[package]] -name = "django" -version = "3.2.20" -description = "A high-level Python Web framework that 
encourages rapid development and clean, pragmatic design." -optional = false -python-versions = ">=3.6" -files = [ - {file = "Django-3.2.20-py3-none-any.whl", hash = "sha256:a477ab326ae7d8807dc25c186b951ab8c7648a3a23f9497763c37307a2b5ef87"}, - {file = "Django-3.2.20.tar.gz", hash = "sha256:dec2a116787b8e14962014bf78e120bba454135108e1af9e9b91ade7b2964c40"}, -] - -[package.dependencies] -asgiref = ">=3.3.2,<4" -pytz = "*" -sqlparse = ">=0.2.2" - -[package.extras] -argon2 = ["argon2-cffi (>=19.1.0)"] -bcrypt = ["bcrypt"] - -[[package]] -name = "django-admin-extra-buttons" -version = "1.5.6" -description = "Django mixin to easily add buttons to any ModelAdmin" -optional = false -python-versions = "*" -files = [ - {file = "django-admin-extra-buttons-1.5.6.tar.gz", hash = "sha256:9616ae96e51cbdd60428bd5e1d2803ae0760a274ed777a56c2c3b2f23ffb872a"}, -] - -[package.extras] -dev = ["autopep8", "bump2version", "check-manifest", "django", "flake8", "isort", "pep8", "pip-tools", "readme", "sphinx", "wheel"] -docs = ["markdown-include", "mdx_gh_links", "mkdocs", "mkdocs-material", "mkdocs-material"] -test = ["coverage", "django_webtest", "factory-boy", "pdbpp", "pyquery", "pytest", "pytest-cov", "pytest-django", "pytest-echo", "tox (>=2.3)", "wheel"] - -[[package]] -name = "django-admin-ordering" -version = "0.17.0" -description = "Orderable change lists and inlines done right^Wsimple" -optional = false -python-versions = ">=3.8" -files = [ - {file = "django_admin_ordering-0.17.0-py3-none-any.whl", hash = "sha256:c617e439e21ee8f3cdb5b8722540411fe713d602ac6c5228d9647ab4b3f4cb9a"}, - {file = "django_admin_ordering-0.17.0.tar.gz", hash = "sha256:e68010e2f1610faf239d515898948824540d0627a0e7c48713c74c89e75378e9"}, -] - -[package.dependencies] -django-js-asset = ">=2.0" - -[package.extras] -tests = ["coverage"] - -[[package]] -name = "django-admin-sync" -version = "0.7.1" -description = "" -optional = false -python-versions = "*" -files = [ - {file = "django-admin-sync-0.7.1.tar.gz", hash = "sha256:70c1ff70affd05c62bbf809572ff28648e869c059294656c05364e7036cebe32"}, -] - -[package.dependencies] -django-admin-extra-buttons = ">=1.5.1" -requests = "*" - -[package.extras] -constance = ["django-constance", "django-picklefield"] -dev = ["black", "django", "django-concurrency", "django-reversion", "django-smart-admin", "django-webtest", "factory-boy", "flake8 (<5)", "flake8-html", "freezegun", "isort", "pdbpp", "pytest", "pytest-coverage", "pytest-django", "pytest-echo", "pytest-responses", "redis", "tox"] -docs = ["markdown-include", "mdx_gh_links", "mkdocs", "mkdocs-material"] -test = ["black", "django-concurrency", "django-constance", "django-picklefield", "django-reversion", "django-smart-admin", "django-webtest", "factory-boy", "flake8 (<5)", "flake8-html", "freezegun", "isort", "pytest", "pytest-coverage", "pytest-django", "pytest-echo", "pytest-responses", "redis", "tox"] - -[[package]] -name = "django-adminactions" -version = "2.1.0" -description = "Collections of useful actions to use with django.contrib.admin.ModelAdmin" -optional = false -python-versions = "*" -files = [ - {file = "django-adminactions-2.1.0.tar.gz", hash = "sha256:e750f060481bd8fd05e254225fa8a9ea7aac1f0b5cf79db352777152fc7eb679"}, - {file = "django_adminactions-2.1.0-py2.py3-none-any.whl", hash = "sha256:e3fa730284ae4998156aacdd5112036cd451bfd355d464e0dc66f9e6c6eb192e"}, -] - -[package.dependencies] -pytz = "*" -xlrd = ">=0.9.2" -xlwt = "*" - -[package.extras] -dev = ["black", "celery", "check-manifest", "django (<4)", 
"django-admin-extra-urls", "django-dynamic-fixture", "django-environ", "django-webtest (>1.9.6)", "docutils", "flake8", "flake8-isort", "mock (>=1.0.1)", "modernize", "pdbpp", "pillow", "pytest", "pytest-cache", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "readme", "redis", "selenium (>=2.42.0)", "setuptools (>=15.0)", "tox (<4)", "virtualenv", "wheel"] -docs = ["black", "check-manifest", "django", "django (<4)", "docutils", "pdbpp", "sphinx", "sphinx-rtd-theme", "virtualenv", "wheel"] -test = ["celery", "check-manifest", "django-admin-extra-urls", "django-dynamic-fixture", "django-environ", "django-webtest (>1.9.6)", "flake8", "flake8-isort", "mock (>=1.0.1)", "modernize", "pillow", "pytest", "pytest-cache", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "pytz", "readme", "redis", "selenium (>=2.42.0)", "setuptools (>=15.0)", "tox (<4)", "xlrd (>=0.9.2)", "xlwt"] - -[[package]] -name = "django-adminfilters" -version = "2.2.0" -description = "Extra filters for django admin site" -optional = false -python-versions = "*" -files = [ - {file = "django-adminfilters-2.2.0.tar.gz", hash = "sha256:8ee6315e814bcba05b4de25038a9580ca8396f37ed21b35e570811fc76b0e46c"}, - {file = "django_adminfilters-2.2.0-py2.py3-none-any.whl", hash = "sha256:5f7eb998e5a80cdcf6089d3d863d8261aa9c6a03ef1ef33d46de148b32654adb"}, -] - -[package.extras] -dev = ["black", "check-manifest", "django", "django-environ", "factory-boy", "flake8", "isort", "pdbpp", "pre-commit", "psycopg2-binary", "pytest", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "pytest-selenium", "sphinx", "sphinx-rtd-theme", "sphinxcontrib.video", "tox"] -test = ["check-manifest", "django-environ", "factory-boy", "flake8", "isort", "pdbpp", "psycopg2-binary", "pytest", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "pytest-selenium", "tox"] - -[[package]] -name = "django-appconf" -version = "1.0.5" -description = "A helper class for handling configuration defaults of packaged apps gracefully." -optional = false -python-versions = ">=3.6" -files = [ - {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, - {file = "django_appconf-1.0.5-py3-none-any.whl", hash = "sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d"}, -] - -[package.dependencies] -django = "*" - -[[package]] -name = "django-bitfield" -version = "2.2.0" -description = "BitField in Django" -optional = false -python-versions = "*" -files = [ - {file = "django-bitfield-2.2.0.tar.gz", hash = "sha256:1b21262acc4ec0af3f82ed04498a056cd9d5452532ac02771e004835a34e0b1b"}, -] - -[package.dependencies] -Django = ">=1.11.29" -six = "*" - -[package.extras] -tests = ["flake8", "mysqlclient", "psycopg2 (>=2.3)", "pytest-django"] - -[[package]] -name = "django-click" -version = "2.3.0" -description = "Build Django management commands using the click CLI package." 
-optional = false -python-versions = "*" -files = [ - {file = "django-click-2.3.0.tar.gz", hash = "sha256:bffb0d10c3f3dfe3f5b116ce902c7a7a9aa4e9c046de95259dc044462eb746b4"}, - {file = "django_click-2.3.0-py2.py3-none-any.whl", hash = "sha256:b862d6b2424edba564968834c8570e39db8142babaa843fa791a4958a2dabb54"}, -] - -[package.dependencies] -click = ">=7.1" - -[package.extras] -dev = ["Fabric", "Sphinx", "check-manifest", "flake8", "livereload", "mccabe", "pep8", "pep8-naming", "pyflakes", "sphinx-autobuild", "sphinx-rtd-theme", "wheel"] -test = ["coverage (<5)", "pytest", "pytest-cov", "pytest-django", "pytest-flake8"] - -[[package]] -name = "django-concurrency" -version = "2.4" -description = "Optimistic lock implementation for Django. Prevents users from doing concurrent editing." -optional = false -python-versions = "*" -files = [ - {file = "django-concurrency-2.4.tar.gz", hash = "sha256:6d45e19f4a288da34578aaeb8eceb394ffac5bfaee8dc43f157c051c7b9cae6c"}, - {file = "django_concurrency-2.4-py2.py3-none-any.whl", hash = "sha256:ae7dcec114ba14216c97c33870de45b62e381aa99ec3c7c4e047921b25d9246f"}, -] - -[[package]] -name = "django-constance" -version = "2.9.1" -description = "Django live settings with pluggable backends, including Redis." -optional = false -python-versions = ">=3.6" -files = [ - {file = "django-constance-2.9.1.tar.gz", hash = "sha256:4c6a96a5f2cbce1bc3fa41aa20566b6ee26fbd896c9f91f996518a3a0904f6c8"}, - {file = "django_constance-2.9.1-py3-none-any.whl", hash = "sha256:bf0b392efa18a1f3f464eddb7eb36ac5c02598354a5e31d0d4ce4fc8b535694b"}, -] - -[package.extras] -database = ["django-picklefield"] -redis = ["redis"] - -[[package]] -name = "django-cors-headers" -version = "3.14.0" -description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." -optional = false -python-versions = ">=3.7" -files = [ - {file = "django_cors_headers-3.14.0-py3-none-any.whl", hash = "sha256:684180013cc7277bdd8702b80a3c5a4b3fcae4abb2bf134dceb9f5dfe300228e"}, - {file = "django_cors_headers-3.14.0.tar.gz", hash = "sha256:5fbd58a6fb4119d975754b2bc090f35ec160a8373f276612c675b00e8a138739"}, -] - -[package.dependencies] -Django = ">=3.2" - -[[package]] -name = "django-csp" -version = "3.7" -description = "Django Content Security Policy support." -optional = false -python-versions = "*" -files = [ - {file = "django_csp-3.7-py2.py3-none-any.whl", hash = "sha256:01443a07723f9a479d498bd7bb63571aaa771e690f64bde515db6cdb76e8041a"}, - {file = "django_csp-3.7.tar.gz", hash = "sha256:01eda02ad3f10261c74131cdc0b5a6a62b7c7ad4fd017fbefb7a14776e0a9727"}, -] - -[package.dependencies] -Django = ">=1.8" - -[package.extras] -jinja2 = ["jinja2 (>=2.9.6)"] -tests = ["jinja2 (>=2.9.6)", "mock (==1.0.1)", "pep8 (==1.4.6)", "pytest (<4.0)", "pytest-django", "pytest-flakes (==1.0.1)", "pytest-pep8 (==1.0.6)", "six (==1.12.0)"] - -[[package]] -name = "django-debug-toolbar" -version = "3.8.1" -description = "A configurable set of panels that display various debug information about the current request/response." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "django_debug_toolbar-3.8.1-py3-none-any.whl", hash = "sha256:879f8a4672d41621c06a4d322dcffa630fc4df056cada6e417ed01db0e5e0478"}, - {file = "django_debug_toolbar-3.8.1.tar.gz", hash = "sha256:24ef1a7d44d25e60d7951e378454c6509bf536dce7e7d9d36e7c387db499bc27"}, -] - -[package.dependencies] -django = ">=3.2.4" -sqlparse = ">=0.2" - -[[package]] -name = "django-environ" -version = "0.9.0" -description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." -optional = false -python-versions = ">=3.4,<4" -files = [ - {file = "django-environ-0.9.0.tar.gz", hash = "sha256:bff5381533056328c9ac02f71790bd5bf1cea81b1beeb648f28b81c9e83e0a21"}, - {file = "django_environ-0.9.0-py2.py3-none-any.whl", hash = "sha256:f21a5ef8cc603da1870bbf9a09b7e5577ab5f6da451b843dbcc721a7bca6b3d9"}, -] - -[package.extras] -develop = ["coverage[toml] (>=5.0a4)", "furo (>=2021.8.17b43,<2021.9.dev0)", "pytest (>=4.6.11)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] -docs = ["furo (>=2021.8.17b43,<2021.9.dev0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] -testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"] - -[[package]] -name = "django-filter" -version = "22.1" -description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." -optional = false -python-versions = ">=3.7" -files = [ - {file = "django-filter-22.1.tar.gz", hash = "sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5"}, - {file = "django_filter-22.1-py3-none-any.whl", hash = "sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb"}, -] - -[package.dependencies] -Django = ">=3.2" - -[[package]] -name = "django-flags" -version = "5.0.13" -description = "Feature flags for Django projects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "django-flags-5.0.13.tar.gz", hash = "sha256:ff6940cf37e07d6d0c4ac28c5420c8cfc478b62541473dba4aa02d600f7db9fc"}, - {file = "django_flags-5.0.13-py2.py3-none-any.whl", hash = "sha256:52df74b86d93f5cb402190ad26b68a5ba0f127e9e016189f1a6f2e8ba3c06a42"}, -] - -[package.dependencies] -Django = ">=2.2" - -[package.extras] -docs = ["mkdocs (>=0.17)", "mkdocs-rtd-dropdown (>=0.0.11)", "pymdown-extensions (>=4.11)"] -testing = ["coverage (>=3.7.0)", "django-debug-toolbar (>=3.2,<4)", "jinja2"] - -[[package]] -name = "django-front-door" -version = "0.10.0" -description = "" -optional = false -python-versions = "*" -files = [ - {file = "django-front-door-0.10.0.tar.gz", hash = "sha256:6c169a829b86d2a8e4bb1365af8a04d54cd5ef1787b0e9c720231c0bdb51abf1"}, -] - -[package.dependencies] -django_regex = "*" - -[package.extras] -constance = ["django-constance", "django-picklefield"] -dev = ["django", "django-constance", "django-picklefield", "django-webtest", "flake8", "isort", "pdbpp", "pytest", "pytest-coverage", "pytest-django", "pytest-echo", "redis", "tox"] -docs = ["django", "pdbpp"] -redis = ["redis"] -test = ["django-constance", "django-constance", "django-picklefield", "django-picklefield", "django-webtest", "django_regex", "flake8", "isort", "pytest", "pytest-coverage", "pytest-django", "pytest-echo", "redis", "redis", "tox", "user_agents"] -ua = ["user_agents"] - -[[package]] -name = "django-hijack" -version = "3.4.1" -description = "django-hijack allows superusers to hijack (=login as) and work on behalf of another user." 
-optional = false -python-versions = "*" -files = [ - {file = "django-hijack-3.4.1.tar.gz", hash = "sha256:2bdd7ffa42198637f49c195468b8330ea51dc36b7c2bee0f3c220c9d7de78764"}, - {file = "django_hijack-3.4.1-py3-none-any.whl", hash = "sha256:d49b4b6426f4c4694ee0c7591990f9ccb91a7910ba22a8f10e11e0b1f3e0b5b4"}, -] - -[package.dependencies] -django = ">=2.2" - -[package.extras] -test = ["pytest", "pytest-cov", "pytest-django"] - -[[package]] -name = "django-import-export" -version = "3.2.0" -description = "Django application and library for importing and exporting data with included admin integration." -optional = false -python-versions = ">=3.7" -files = [ - {file = "django-import-export-3.2.0.tar.gz", hash = "sha256:38fd7b9439b9e3aa1a4747421c1087a5bc194e915a28d795fb8429a5f8028f2d"}, - {file = "django_import_export-3.2.0-py3-none-any.whl", hash = "sha256:1d3f2cb2ee3cca0386ed60651fa1623be989f130d9fbdf98a67f7dc3a94b8a37"}, -] - -[package.dependencies] -diff-match-patch = "*" -Django = ">=3.2" -tablib = {version = ">=3.4.0", extras = ["html", "ods", "xls", "xlsx", "yaml"]} - -[[package]] -name = "django-js-asset" -version = "2.1.0" -description = "script tag with additional attributes for django.forms.Media" -optional = false -python-versions = ">=3.8" -files = [ - {file = "django_js_asset-2.1.0-py3-none-any.whl", hash = "sha256:36a3a4dd6e9efc895fb127d13126020f6ec1ec9469ad42878d42143f22495d90"}, - {file = "django_js_asset-2.1.0.tar.gz", hash = "sha256:be6f69ae5c4865617aa7726c48eddb64089a1e7d4ea7d22a35a3beb8282020f6"}, -] - -[package.dependencies] -django = ">=3.2" - -[package.extras] -tests = ["coverage"] - -[[package]] -name = "django-jsoneditor" -version = "0.2.4" -description = "Django JSON Editor" -optional = false -python-versions = "*" -files = [ - {file = "django-jsoneditor-0.2.4.tar.gz", hash = "sha256:1d3dfca28f047feefa6ebc6f9541179eb815fb459b006faf3fb8d0fb2197d2df"}, - {file = "django_jsoneditor-0.2.4-py2.py3-none-any.whl", hash = "sha256:d7a639a7251e376126b5be64ea588c925c7a40d45e0e212f66ef475d2f0f90bb"}, -] - -[package.dependencies] -packaging = "*" - -[[package]] -name = "django-mdeditor" -version = "0.1.20" -description = "A simple Django app to edit markdown text." -optional = false -python-versions = "*" -files = [ - {file = "django-mdeditor-0.1.20.tar.gz", hash = "sha256:d1f3b735b6202b8820f10e024a5aa28c5b948897ee7a8dbb84558df75339827e"}, -] - -[[package]] -name = "django-mptt" -version = "0.14.0" -description = "Utilities for implementing Modified Preorder Tree Traversal with your Django Models and working with trees of Model instances." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "django-mptt-0.14.0.tar.gz", hash = "sha256:2c92a2b1614c53086278795ccf50580cf1f9b8564f3ff03055dd62bab5987711"}, - {file = "django_mptt-0.14.0-py3-none-any.whl", hash = "sha256:d9a87433ab0e4f35247c6f6d5a93ace6990860a4ba8796f815d185f773b9acfc"}, -] - -[package.dependencies] -django-js-asset = "*" - -[package.extras] -tests = ["coverage", "mock-django"] - -[[package]] -name = "django-picklefield" -version = "3.1" -description = "Pickled object field for Django" -optional = false -python-versions = ">=3" -files = [ - {file = "django-picklefield-3.1.tar.gz", hash = "sha256:c786cbeda78d6def2b43bff4840d19787809c8909f7ad683961703060398d356"}, - {file = "django_picklefield-3.1-py3-none-any.whl", hash = "sha256:d77c504df7311e8ec14e8b779f10ca6fec74de6c7f8e2c136e1ef60cf955125d"}, -] - -[package.dependencies] -Django = ">=3.2" - -[package.extras] -tests = ["tox"] - -[[package]] -name = "django-pwa" -version = "1.1.0" -description = "A Django app to include a manifest.json and Service Worker instance to enable progressive web app behavior" -optional = false -python-versions = "*" -files = [ - {file = "django-pwa-1.1.0.tar.gz", hash = "sha256:9cf8e5ac02ee462dd7c2c4309ee2cb0066334f9963dcadd053bd3805792bac29"}, - {file = "django_pwa-1.1.0-py3-none-any.whl", hash = "sha256:92ab82211eb3c79dfb3fc1f40cdf2727503199ec5c4bf21e7fbf8c0dd1daeaa0"}, -] - -[package.dependencies] -django = ">=2.0" - -[[package]] -name = "django-ranged-response" -version = "0.2.0" -description = "Modified Django FileResponse that adds Content-Range headers." -optional = false -python-versions = "*" -files = [ - {file = "django-ranged-response-0.2.0.tar.gz", hash = "sha256:f71fff352a37316b9bead717fc76e4ddd6c9b99c4680cdf4783b9755af1cf985"}, -] - -[package.dependencies] -django = "*" - -[[package]] -name = "django-redis" -version = "5.3.0" -description = "Full featured redis cache backend for Django." -optional = false -python-versions = ">=3.6" -files = [ - {file = "django-redis-5.3.0.tar.gz", hash = "sha256:8bc5793ec06b28ea802aad85ec437e7646511d4e571e07ccad19cfed8b9ddd44"}, - {file = "django_redis-5.3.0-py3-none-any.whl", hash = "sha256:2d8660d39f586c41c9907d5395693c477434141690fd7eca9d32376af00b0aac"}, -] - -[package.dependencies] -Django = ">=3.2" -redis = ">=3,<4.0.0 || >4.0.0,<4.0.1 || >4.0.1" - -[package.extras] -hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] - -[[package]] -name = "django-regex" -version = "0.5.0" -description = "Fields and utilities to work with regular expression in Django" -optional = false -python-versions = "*" -files = [ - {file = "django-regex-0.5.0.tar.gz", hash = "sha256:6af1add11ae5232f133a42754c9291f9113996b1294b048305d9f1a427bca27c"}, -] - -[package.extras] -dev = ["devpi-builder", "flake8", "isort", "pip-tools", "sphinx"] -extra = ["admin-extra-urls"] -test = ["admin-extra-urls", "django-webtest", "factory_boy", "isort", "pdbpp", "pytest", "pytest-cov", "pytest-django", "pytest-echo", "pytest-isort", "pytest-pep8", "pytest-pythonpath", "tox"] -tests = ["admin-extra-urls", "django-webtest", "factory_boy", "isort", "pdbpp", "pytest", "pytest-cov", "pytest-django", "pytest-echo", "pytest-isort", "pytest-pep8", "pytest-pythonpath", "tox"] - -[[package]] -name = "django-reversion" -version = "5.0.4" -description = "An extension to the Django web framework that provides version control for model instances." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "django-reversion-5.0.4.tar.gz", hash = "sha256:c12bab452d31dd3c244456cf1df383acf14ba147cf99404c5e44412596de42fc"}, - {file = "django_reversion-5.0.4-py3-none-any.whl", hash = "sha256:a591cbce8621b5d036a37617554668b5ef2eb9777682e3af20b6401ee87cfbc5"}, -] - -[package.dependencies] -django = ">=3.2" - -[[package]] -name = "django-reversion-compare" -version = "0.15.0" -description = "history compare for django-reversion" -optional = false -python-versions = ">=3.7,<4.0.0" -files = [ - {file = "django-reversion-compare-0.15.0.tar.gz", hash = "sha256:ed0264a2852d9d867023f1874948b8234dad9c2d2fa22ea18cfd5f28f304d7a0"}, - {file = "django_reversion_compare-0.15.0-py3-none-any.whl", hash = "sha256:d6f37b106aec287ae17a076bb7db1184ab02ab1898f0e8693f2779fbdaf71697"}, -] - -[package.dependencies] -diff-match-patch = "*" -django-reversion = "*" - -[[package]] -name = "django-simple-captcha" -version = "0.5.18" -description = "A very simple, yet powerful, Django captcha application" -optional = false -python-versions = "*" -files = [ - {file = "django-simple-captcha-0.5.18.tar.gz", hash = "sha256:6e1fcc4f4005f7d69ee7a2e59a7e863b5d3918f36a85a4d811498984aecc48ce"}, - {file = "django_simple_captcha-0.5.18-py2.py3-none-any.whl", hash = "sha256:567ad84fa64c86508c679b8425cc1410c44b3cd6467e54f8d31cf077d9366407"}, -] - -[package.dependencies] -Django = ">=3.2" -django-ranged-response = "0.2.0" -Pillow = ">=6.2.0" - -[package.extras] -test = ["testfixtures"] - -[[package]] -name = "django-simple-math-captcha" -version = "2.0.1" -description = "An easy-to-use math field/widget captcha for Django forms." -optional = false -python-versions = "*" -files = [ - {file = "django-simple-math-captcha-2.0.1.tar.gz", hash = "sha256:5363a4754dad461b22324668032e08422dc9a5ef611d35950fd3da92a87d1ad3"}, - {file = "django_simple_math_captcha-2.0.1-py2-none-any.whl", hash = "sha256:7e14a9bae635fce0824aeb51df7004d733d7ab41c8108514474ed09588181ddf"}, -] - -[[package]] -name = "django-smart-admin" -version = "2.6.0" -description = "" -optional = false -python-versions = "==3.*,>=3.8" -files = [ - {file = "django-smart-admin-2.6.0.tar.gz", hash = "sha256:9ac878433c57eb285360e0c019258fc7fef9d5557805dafb55d2f965e4fe02e2"}, -] - -[package.dependencies] -django-admin-extra-buttons = "*" -django-adminactions = ">=1.14" -django-adminfilters = ">=2" -django-sysinfo = ">=2.6.2" - -[package.extras] -dev = ["bump2version", "django-constance", "django-environ", "django-picklefield", "django-webtest", "factory-boy", "factory_boy", "flake8", "isort", "psycopg2", "pyquery", "pytest", "pytest-cov", "pytest-django", "pytest-echo", "pytest-pythonpath", "tox", "whitenoise"] - -[[package]] -name = "django-strategy-field" -version = "3.0.0" -description = "" -optional = false -python-versions = "*" -files = [ - {file = "django-strategy-field-3.0.0.tar.gz", hash = "sha256:a65fcacbf97d3e61a9208c4dd4f2a601ce2819b5e09403c8338c8e08f298bd06"}, -] - -[package.dependencies] -pytz = "*" - -[package.extras] -drf = ["django-rest-framework"] -test = ["coverage", "django-webtest", "django_dynamic_fixture", "factory-boy", "faker", "flake8", "isort", "pycodestyle (>2.4)", "pytest (<7)", "pytest-coverage", "pytest-django", "pytest-echo", "pytest-pythonpath (>=0.7.4)", "tox", "twine", "webtest"] - -[[package]] -name = "django-stubs" -version = "1.16.0" -description = "Mypy stubs for Django" -optional = false -python-versions = ">=3.7" -files = [ - {file = "django-stubs-1.16.0.tar.gz", hash = 
"sha256:1bd96207576cd220221a0e615f0259f13d453d515a80f576c1246e0fb547f561"}, - {file = "django_stubs-1.16.0-py3-none-any.whl", hash = "sha256:c95f948e2bfc565f3147e969ff361ef033841a0b8a51cac974a6cc6d0486732c"}, -] - -[package.dependencies] -django = "*" -django-stubs-ext = ">=0.8.0" -mypy = [ - {version = ">=0.980"}, - {version = ">=1.1.1,<1.2", optional = true, markers = "extra == \"compatible-mypy\""}, -] -tomli = "*" -types-pytz = "*" -types-PyYAML = "*" -typing-extensions = "*" - -[package.extras] -compatible-mypy = ["mypy (>=1.1.1,<1.2)"] - -[[package]] -name = "django-stubs-ext" -version = "4.2.2" -description = "Monkey-patching and extensions for django-stubs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "django-stubs-ext-4.2.2.tar.gz", hash = "sha256:c69d1cc46f1c4c3b7894b685a5022c29b2a36c7cfb52e23762eaf357ebfc2c98"}, - {file = "django_stubs_ext-4.2.2-py3-none-any.whl", hash = "sha256:fdacc65a14d2d4b97334b58ff178a5853ec8c8c76cec406e417916ad67536ce4"}, -] - -[package.dependencies] -django = "*" -typing-extensions = "*" - -[[package]] -name = "django-sysinfo" -version = "2.6.2" -description = "Simple django app to expose system infos: libraries version, databae server infos..." -optional = false -python-versions = ">=3.9,<4.0" -files = [ - {file = "django-sysinfo-2.6.2.tar.gz", hash = "sha256:2efe6c1a3bedc5766549b8872c7d8b3266ecb45db9a6423b94365d7d089c86dd"}, - {file = "django_sysinfo-2.6.2-py3-none-any.whl", hash = "sha256:9589d4bff0605ed7681a2ca22ce2326a4b64db768038da39180a2ba31ab3ffea"}, -] - -[package.dependencies] -psutil = "*" -python-dateutil = "*" -pytz = "*" - -[[package]] -name = "django-tinymce" -version = "3.6.1" -description = "A Django application that contains a widget to render a form field as a TinyMCE editor." -optional = false -python-versions = "*" -files = [ - {file = "django-tinymce-3.6.1.tar.gz", hash = "sha256:6f4f6227c2c608052081a436a1e3054c441caae24c9e0c8c3010536e24749e29"}, - {file = "django_tinymce-3.6.1-py3-none-any.whl", hash = "sha256:da5732413f51cf854352e3148f06f170b59d95a6c6a43fd9f7ccfdd1849f4bf9"}, -] - -[[package]] -name = "django-webtest" -version = "1.9.10" -description = "Instant integration of Ian Bicking's WebTest (http://docs.pylonsproject.org/projects/webtest/) with Django's testing framework." -optional = false -python-versions = "*" -files = [ - {file = "django-webtest-1.9.10.tar.gz", hash = "sha256:c8c32041791cdae468e443097c432c67cf17cad339e1ab88b01a6c4841ee4c74"}, - {file = "django_webtest-1.9.10-py3-none-any.whl", hash = "sha256:ef075e98b38fe3836dc533c2924d3e37c6bb3483008c40567115518a0303b1af"}, -] - -[package.dependencies] -webtest = ">=1.3.3" - -[[package]] -name = "djangorestframework" -version = "3.14.0" -description = "Web APIs for Django, made easy." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"}, - {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"}, -] - -[package.dependencies] -django = ">=3.0" -pytz = "*" - -[[package]] -name = "djangorestframework-datatables" -version = "0.7.0" -description = "Seamless integration between Django REST framework and Datatables (https://datatables.net)" -optional = false -python-versions = "*" -files = [ - {file = "djangorestframework-datatables-0.7.0.tar.gz", hash = "sha256:64ccae255cbe03ae14793c55b900ce58894eb856816d6f9e16acea4a2197a6d9"}, - {file = "djangorestframework_datatables-0.7.0-py2.py3-none-any.whl", hash = "sha256:1be615b811a5625546e93f4c331b868743925776b6c7ad6c77d22e1af6f49819"}, -] - -[package.dependencies] -djangorestframework = ">=3.7.0" -pytz = "*" - -[[package]] -name = "et-xmlfile" -version = "1.1.0" -description = "An implementation of lxml.xmlfile for the standard library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.1.3" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "1.2.0" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = "*" -files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, -] - -[package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] - -[[package]] -name = "factory-boy" -version = "3.3.0" -description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." -optional = false -python-versions = ">=3.7" -files = [ - {file = "factory_boy-3.3.0-py2.py3-none-any.whl", hash = "sha256:a2cdbdb63228177aa4f1c52f4b6d83fab2b8623bf602c7dedd7eb83c0f69c04c"}, - {file = "factory_boy-3.3.0.tar.gz", hash = "sha256:bc76d97d1a65bbd9842a6d722882098eb549ec8ee1081f9fb2e8ff29f0c300f1"}, -] - -[package.dependencies] -Faker = ">=0.7.0" - -[package.extras] -dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "sqlalchemy-utils", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] -doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] - -[[package]] -name = "faker" -version = "14.2.1" -description = "Faker is a Python package that generates fake data for you." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "Faker-14.2.1-py3-none-any.whl", hash = "sha256:2e28aaea60456857d4ce95dd12aed767769537ad23d13d51a545cd40a654e9d9"}, - {file = "Faker-14.2.1.tar.gz", hash = "sha256:daad7badb4fd916bd047b28c8459ef4689e4fe6acf61f6dfebee8cc602e4d009"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" - -[[package]] -name = "fancycompleter" -version = "0.9.1" -description = "colorful TAB completion for Python prompt" -optional = false -python-versions = "*" -files = [ - {file = "fancycompleter-0.9.1-py3-none-any.whl", hash = "sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"}, - {file = "fancycompleter-0.9.1.tar.gz", hash = "sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272"}, -] - -[package.dependencies] -pyreadline = {version = "*", markers = "platform_system == \"Windows\""} -pyrepl = ">=0.8.2" - -[[package]] -name = "filelock" -version = "3.12.2" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.7" -files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, -] - -[package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.1.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" - -[[package]] -name = "freezegun" -version = "1.2.2" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.6" -files = [ - {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, - {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "html-json-forms" -version = "1.1.1" -description = "Implementation of the HTML JSON Forms spec for use with the Django REST Framework." 
-optional = false -python-versions = "*" -files = [ - {file = "html-json-forms-1.1.1.tar.gz", hash = "sha256:16dc413dc858fcc53602ad509c1aef735534838e1bae888bf429e210a9c48f6b"}, - {file = "html_json_forms-1.1.1-py3-none-any.whl", hash = "sha256:51e7e9088bc88e324027144ca25d8bcdd37da28f311a8436bfd88944138ed409"}, -] - -[[package]] -name = "htmlmin" -version = "0.1.12" -description = "An HTML Minifier" -optional = false -python-versions = "*" -files = [ - {file = "htmlmin-0.1.12.tar.gz", hash = "sha256:50c1ef4630374a5d723900096a961cff426dff46b48f34d194a81bbe14eca178"}, -] - -[[package]] -name = "hyperlink" -version = "21.0.0" -description = "A featureful, immutable, and correct URL for Python." -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, - {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, -] - -[package.dependencies] -idna = ">=2.5" - -[[package]] -name = "identify" -version = "2.5.26" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.5.26-py2.py3-none-any.whl", hash = "sha256:c22a8ead0d4ca11f1edd6c9418c3220669b3b7533ada0a0ffa6cc0ef85cf9b54"}, - {file = "identify-2.5.26.tar.gz", hash = "sha256:7243800bce2f58404ed41b7c002e53d4d22bcf3ae1b7900c2d7aefd95394bf7f"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "importlib-metadata" -version = "6.8.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "incremental" -version = "22.10.0" -description = "\"A small library that versions your Python projects.\"" -optional = false -python-versions = "*" -files = [ - {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, - {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, -] - -[package.extras] -mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] -scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini 
parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "ipython" -version = "8.14.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.9" -files = [ - {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, - {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] - -[[package]] -name = "isort" -version = "5.12.0" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "jedi" -version = "0.19.0" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"}, - {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jmespath" -version = "1.0.1" -description = "JSON Matching Expressions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] - -[[package]] -name = "jsonpickle" -version = "2.2.0" -description = "Python library for serializing any arbitrary object graph into JSON" -optional = false -python-versions = ">=2.7" -files = [ - {file = "jsonpickle-2.2.0-py2.py3-none-any.whl", hash = "sha256:de7f2613818aa4f234138ca11243d6359ff83ae528b2185efdd474f62bcf9ae1"}, - {file = "jsonpickle-2.2.0.tar.gz", hash = "sha256:7b272918b0554182e53dc340ddd62d9b7f902fec7e7b05620c04f3ccef479a0e"}, -] - -[package.extras] -docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["ecdsa", "enum34", "feedparser", "jsonlib", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (<1.1.0)", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] -testing-libs = ["simplejson", "ujson", "yajl"] - -[[package]] -name = "kombu" -version = "5.3.1" -description = "Messaging library for Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "kombu-5.3.1-py3-none-any.whl", hash = "sha256:48ee589e8833126fd01ceaa08f8a2041334e9f5894e5763c8486a550454551e9"}, - {file = "kombu-5.3.1.tar.gz", hash = "sha256:fbd7572d92c0bf71c112a6b45163153dea5a7b6a701ec16b568c27d0fd2370f2"}, -] - -[package.dependencies] -amqp = ">=5.1.1,<6.0.0" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.10.0)"] -azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] -confluentkafka = ["confluent-kafka (==2.1.1)"] -consul = ["python-consul2"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=2.8.0)"] - -[[package]] -name = "lxml" -version = "4.9.3" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = 
"sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = 
"lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = 
"lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] - -[[package]] -name = "markdown" -version = "3.4.4" -description = "Python implementation of John Gruber's Markdown." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, - {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] -testing = ["coverage", "pyyaml"] - -[[package]] -name = "markuppy" -version = "1.14" -description = "An HTML/XML generator" -optional = false -python-versions = "*" -files = [ - {file = "MarkupPy-1.14.tar.gz", hash = "sha256:1adee2c0a542af378fe84548ff6f6b0168f3cb7f426b46961038a2bcfaad0d5f"}, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.6" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.5" -files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "msgpack" -version = "1.0.5" -description = "MessagePack serializer" -optional = false -python-versions = "*" -files = [ - {file = 
"msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"}, - {file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"}, - {file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"}, - {file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"}, - {file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"}, - {file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"}, - {file = 
"msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"}, - {file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = "sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"}, - {file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"}, - {file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"}, - {file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"}, - {file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"}, - {file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"}, - {file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"}, - {file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"}, - {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"}, - {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, -] - -[[package]] -name = "mypy" -version = "1.1.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"}, - {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"}, - {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"}, - {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"}, - {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"}, - {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"}, - {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"}, - {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"}, - {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"}, - {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"}, - {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"}, - {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"}, - {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"}, - {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"}, - {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"}, - {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"}, - {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"}, - {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"}, - {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"}, - {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"}, - {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"}, - {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"}, - {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"}, - {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"}, - {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"}, - {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "natural-keys" -version = "2.1.0" -description = "Enhanced support for natural keys in Django and Django REST Framework." -optional = false -python-versions = ">=3.7" -files = [ - {file = "natural-keys-2.1.0.tar.gz", hash = "sha256:f3c35669235aa70bc5fee9891cc322675a3f3040a28be48db21d1c506ef8ce46"}, - {file = "natural_keys-2.1.0-py3-none-any.whl", hash = "sha256:5374fda8937710f43851b050bb6c0cb01859c578562e0a9310776bd53c8fb31c"}, -] - -[package.dependencies] -html-json-forms = ">=1.0.0" - -[[package]] -name = "nodeenv" -version = "1.8.0" -description = "Node.js virtual environment builder" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, -] - -[package.dependencies] -setuptools = "*" - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "odfpy" -version = "1.4.1" -description = "Python API and tools to manipulate OpenDocument files" -optional = false -python-versions = "*" -files = [ - {file = "odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec"}, -] - -[package.dependencies] -defusedxml = "*" - -[[package]] -name = "openpyxl" -version = "3.1.2" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -optional = false -python-versions = ">=3.6" -files = [ - {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, - {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, -] - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "outcome" -version = "1.2.0" -description = "Capture the outcome of Python function calls." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"}, - {file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"}, -] - -[package.dependencies] -attrs = ">=19.2.0" - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] - -[[package]] -name = "parso" -version = "0.8.3" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] - -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] - -[[package]] -name = "pathspec" -version = "0.11.2" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, -] - -[[package]] -name = "pdbpp" -version = "0.10.3" -description = "pdb++, a drop-in replacement for pdb" -optional = false -python-versions = "*" -files = [ - {file = "pdbpp-0.10.3-py2.py3-none-any.whl", hash = "sha256:79580568e33eb3d6f6b462b1187f53e10cd8e4538f7d31495c9181e2cf9665d1"}, - {file = "pdbpp-0.10.3.tar.gz", hash = "sha256:d9e43f4fda388eeb365f2887f4e7b66ac09dce9b6236b76f63616530e2f669f5"}, -] - -[package.dependencies] -fancycompleter = ">=0.8" -pygments = "*" -wmctrl = "*" - -[package.extras] -funcsigs = ["funcsigs"] -testing = ["funcsigs", "pytest"] - -[[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." 
-optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - -[[package]] -name = "pillow" -version = "9.5.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, - {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"}, - {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"}, - {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"}, - {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"}, - {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"}, - {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"}, - {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"}, - {file = 
"Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"}, - {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"}, - {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"}, - {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = "sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"}, - {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"}, - {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"}, - {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"}, - {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"}, - {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"}, - {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"}, - {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"}, - {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"}, - {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"}, - {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"}, - {file = 
"Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"}, - {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"}, - {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"}, - {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"}, - {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"}, - {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"}, - {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"}, - {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"}, - {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"}, - {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "platformdirs" -version = "3.10.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - -[[package]] -name = "pluggy" -version = "1.2.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "3.3.3" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prompt-toolkit" -version = "3.0.39" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, - {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "5.9.5" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "psycopg2-binary" -version = "2.9.7" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.6" -files = [ - {file = "psycopg2-binary-2.9.7.tar.gz", hash = "sha256:1b918f64a51ffe19cd2e230b3240ba481330ce1d4b7875ae67305bd1d37b041c"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea5f8ee87f1eddc818fc04649d952c526db4426d26bab16efbe5a0c52b27d6ab"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2993ccb2b7e80844d534e55e0f12534c2871952f78e0da33c35e648bf002bbff"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbbc3c5d15ed76b0d9db7753c0db40899136ecfe97d50cbde918f630c5eb857a"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:692df8763b71d42eb8343f54091368f6f6c9cfc56dc391858cdb3c3ef1e3e584"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd5d37e027ec393a303cc0a216be564b96c80ba532f3d1e0d2b5e5e4b1e6e"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17cc17a70dfb295a240db7f65b6d8153c3d81efb145d76da1e4a096e9c5c0e63"}, - {file = 
"psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e5666632ba2b0d9757b38fc17337d84bdf932d38563c5234f5f8c54fd01349c9"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7db7b9b701974c96a88997d458b38ccb110eba8f805d4b4f74944aac48639b42"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c82986635a16fb1fa15cd5436035c88bc65c3d5ced1cfaac7f357ee9e9deddd4"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4fe13712357d802080cfccbf8c6266a3121dc0e27e2144819029095ccf708372"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-win32.whl", hash = "sha256:122641b7fab18ef76b18860dd0c772290566b6fb30cc08e923ad73d17461dc63"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:f8651cf1f144f9ee0fa7d1a1df61a9184ab72962531ca99f077bbdcba3947c58"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ecc15666f16f97709106d87284c136cdc82647e1c3f8392a672616aed3c7151"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fbb1184c7e9d28d67671992970718c05af5f77fc88e26fd7136613c4ece1f89"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7968fd20bd550431837656872c19575b687f3f6f98120046228e451e4064df"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094af2e77a1976efd4956a031028774b827029729725e136514aae3cdf49b87b"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26484e913d472ecb6b45937ea55ce29c57c662066d222fb0fbdc1fab457f18c5"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f309b77a7c716e6ed9891b9b42953c3ff7d533dc548c1e33fddc73d2f5e21f9"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d92e139ca388ccfe8c04aacc163756e55ba4c623c6ba13d5d1595ed97523e4b"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2df562bb2e4e00ee064779902d721223cfa9f8f58e7e52318c97d139cf7f012d"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4eec5d36dbcfc076caab61a2114c12094c0b7027d57e9e4387b634e8ab36fd44"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1011eeb0c51e5b9ea1016f0f45fa23aca63966a4c0afcf0340ccabe85a9f65bd"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-win32.whl", hash = "sha256:ded8e15f7550db9e75c60b3d9fcbc7737fea258a0f10032cdb7edc26c2a671fd"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:8a136c8aaf6615653450817a7abe0fc01e4ea720ae41dfb2823eccae4b9062a3"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2dec5a75a3a5d42b120e88e6ed3e3b37b46459202bb8e36cd67591b6e5feebc1"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc10da7e7df3380426521e8c1ed975d22df678639da2ed0ec3244c3dc2ab54c8"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee919b676da28f78f91b464fb3e12238bd7474483352a59c8a16c39dfc59f0c5"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb1c0e682138f9067a58fc3c9a9bf1c83d8e08cfbee380d858e63196466d5c86"}, - {file = 
"psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00d8db270afb76f48a499f7bb8fa70297e66da67288471ca873db88382850bf4"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b0c2b466b2f4d89ccc33784c4ebb1627989bd84a39b79092e560e937a11d4ac"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:51d1b42d44f4ffb93188f9b39e6d1c82aa758fdb8d9de65e1ddfe7a7d250d7ad"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:11abdbfc6f7f7dea4a524b5f4117369b0d757725798f1593796be6ece20266cb"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f02f4a72cc3ab2565c6d9720f0343cb840fb2dc01a2e9ecb8bc58ccf95dc5c06"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:81d5dd2dd9ab78d31a451e357315f201d976c131ca7d43870a0e8063b6b7a1ec"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:62cb6de84d7767164a87ca97e22e5e0a134856ebcb08f21b621c6125baf61f16"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59f7e9109a59dfa31efa022e94a244736ae401526682de504e87bd11ce870c22"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:95a7a747bdc3b010bb6a980f053233e7610276d55f3ca506afff4ad7749ab58a"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c721ee464e45ecf609ff8c0a555018764974114f671815a0a7152aedb9f3343"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f37bbc6588d402980ffbd1f3338c871368fb4b1cfa091debe13c68bb3852b3"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac83ab05e25354dad798401babaa6daa9577462136ba215694865394840e31f8"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:024eaeb2a08c9a65cd5f94b31ace1ee3bb3f978cd4d079406aef85169ba01f08"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1c31c2606ac500dbd26381145684d87730a2fac9a62ebcfbaa2b119f8d6c19f4"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:42a62ef0e5abb55bf6ffb050eb2b0fcd767261fa3faf943a4267539168807522"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7952807f95c8eba6a8ccb14e00bf170bb700cafcec3924d565235dffc7dc4ae8"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e02bc4f2966475a7393bd0f098e1165d470d3fa816264054359ed4f10f6914ea"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-win32.whl", hash = "sha256:fdca0511458d26cf39b827a663d7d87db6f32b93efc22442a742035728603d5f"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:d0b16e5bb0ab78583f0ed7ab16378a0f8a89a27256bb5560402749dbe8a164d7"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6822c9c63308d650db201ba22fe6648bd6786ca6d14fdaf273b17e15608d0852"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f94cb12150d57ea433e3e02aabd072205648e86f1d5a0a692d60242f7809b15"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5ee89587696d808c9a00876065d725d4ae606f5f7853b961cdbc348b0f7c9a1"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ad5ec10b53cbb57e9a2e77b67e4e4368df56b54d6b00cc86398578f1c635f329"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:642df77484b2dcaf87d4237792246d8068653f9e0f5c025e2c692fc56b0dda70"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6a8b575ac45af1eaccbbcdcf710ab984fd50af048fe130672377f78aaff6fc1"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f955aa50d7d5220fcb6e38f69ea126eafecd812d96aeed5d5f3597f33fad43bb"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad26d4eeaa0d722b25814cce97335ecf1b707630258f14ac4d2ed3d1d8415265"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ced63c054bdaf0298f62681d5dcae3afe60cbae332390bfb1acf0e23dcd25fc8"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b04da24cbde33292ad34a40db9832a80ad12de26486ffeda883413c9e1b1d5e"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-win32.whl", hash = "sha256:18f12632ab516c47c1ac4841a78fddea6508a8284c7cf0f292cb1a523f2e2379"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb3b8d55924a6058a26db69fb1d3e7e32695ff8b491835ba9f479537e14dcf9f"}, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "py-mini-racer" -version = "0.6.0" -description = "Minimal, modern embedded V8 for Python." 
-optional = false -python-versions = "*" -files = [ - {file = "py_mini_racer-0.6.0-py2.py3-none-macosx_10_10_x86_64.whl", hash = "sha256:346e73bb89a2024888244d487834be24a121089ceb0641dd0200cb96c4e24b57"}, - {file = "py_mini_racer-0.6.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:42896c24968481dd953eeeb11de331f6870917811961c9b26ba09071e07180e2"}, - {file = "py_mini_racer-0.6.0-py2.py3-none-win_amd64.whl", hash = "sha256:97cab31bbf63ce462ba4cd6e978c572c916d8b15586156c7c5e0b2e42c10baab"}, - {file = "py_mini_racer-0.6.0.tar.gz", hash = "sha256:f71e36b643d947ba698c57cd9bd2232c83ca997b0802fc2f7f79582377040c11"}, -] - -[[package]] -name = "pyasn1" -version = "0.5.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.3.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" - -[[package]] -name = "pycodestyle" -version = "2.11.0" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, -] - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pycryptodome" -version = "3.18.0" -description = "Cryptographic library for Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pycryptodome-3.18.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d1497a8cd4728db0e0da3c304856cb37c0c4e3d0b36fcbabcc1600f18504fc54"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:928078c530da78ff08e10eb6cada6e0dff386bf3d9fa9871b4bbc9fbc1efe024"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:157c9b5ba5e21b375f052ca78152dd309a09ed04703fd3721dce3ff8ecced148"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:d20082bdac9218649f6abe0b885927be25a917e29ae0502eaf2b53f1233ce0c2"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad74044e5f5d2456c11ed4cfd3e34b8d4898c0cb201c4038fe41458a82ea27"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win32.whl", hash = 
"sha256:62a1e8847fabb5213ccde38915563140a5b338f0d0a0d363f996b51e4a6165cf"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win_amd64.whl", hash = "sha256:16bfd98dbe472c263ed2821284118d899c76968db1a6665ade0c46805e6b29a4"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7a3d22c8ee63de22336679e021c7f2386f7fc465477d59675caa0e5706387944"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:78d863476e6bad2a592645072cc489bb90320972115d8995bcfbee2f8b209918"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b6a610f8bfe67eab980d6236fdc73bfcdae23c9ed5548192bb2d530e8a92780e"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:422c89fd8df8a3bee09fb8d52aaa1e996120eafa565437392b781abec2a56e14"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:9ad6f09f670c466aac94a40798e0e8d1ef2aa04589c29faa5b9b97566611d1d1"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:53aee6be8b9b6da25ccd9028caf17dcdce3604f2c7862f5167777b707fbfb6cb"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:10da29526a2a927c7d64b8f34592f461d92ae55fc97981aab5bbcde8cb465bb6"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f21efb8438971aa16924790e1c3dba3a33164eb4000106a55baaed522c261acf"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4944defabe2ace4803f99543445c27dd1edbe86d7d4edb87b256476a91e9ffa4"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:51eae079ddb9c5f10376b4131be9589a6554f6fd84f7f655180937f611cd99a2"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:83c75952dcf4a4cebaa850fa257d7a860644c70a7cd54262c237c9f2be26f76e"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:957b221d062d5752716923d14e0926f47670e95fead9d240fa4d4862214b9b2f"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win32.whl", hash = "sha256:795bd1e4258a2c689c0b1f13ce9684fa0dd4c0e08680dcf597cf9516ed6bc0f3"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win_amd64.whl", hash = "sha256:b1d9701d10303eec8d0bd33fa54d44e67b8be74ab449052a8372f12a66f93fb9"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:cb1be4d5af7f355e7d41d36d8eec156ef1382a88638e8032215c215b82a4b8ec"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-win32.whl", hash = "sha256:fc0a73f4db1e31d4a6d71b672a48f3af458f548059aa05e83022d5f61aac9c08"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f022a4fd2a5263a5c483a2bb165f9cb27f2be06f2f477113783efe3fe2ad887b"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:363dd6f21f848301c2dcdeb3c8ae5f0dee2286a5e952a0f04954b82076f23825"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12600268763e6fec3cefe4c2dcdf79bde08d0b6dc1813887e789e495cb9f3403"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4604816adebd4faf8810782f137f8426bf45fee97d8427fa8e1e49ea78a52e2c"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01489bbdf709d993f3058e2996f8f40fee3f0ea4d995002e5968965fa2fe89fb"}, - {file = 
"pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3811e31e1ac3069988f7a1c9ee7331b942e605dfc0f27330a9ea5997e965efb2"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4b967bb11baea9128ec88c3d02f55a3e338361f5e4934f5240afcb667fdaec"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9c8eda4f260072f7dbe42f473906c659dcbadd5ae6159dfb49af4da1293ae380"}, - {file = "pycryptodome-3.18.0.tar.gz", hash = "sha256:c9adee653fc882d98956e33ca2c1fb582e23a8af7ac82fee75bd6113c55a0413"}, -] - -[[package]] -name = "pyflakes" -version = "3.1.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, -] - -[[package]] -name = "pygments" -version = "2.16.1" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, -] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyopenssl" -version = "23.2.0" -description = "Python wrapper module around the OpenSSL library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, - {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, -] - -[package.dependencies] -cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" - -[package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] - -[[package]] -name = "pypng" -version = "0.20220715.0" -description = "Pure Python library for saving and loading PNG images" -optional = false -python-versions = "*" -files = [ - {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"}, - {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"}, -] - -[[package]] -name = "pyproject-api" -version = "1.5.3" -description = "API to interact with the python pyproject.toml based 
projects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyproject_api-1.5.3-py3-none-any.whl", hash = "sha256:14cf09828670c7b08842249c1f28c8ee6581b872e893f81b62d5465bec41502f"}, - {file = "pyproject_api-1.5.3.tar.gz", hash = "sha256:ffb5b2d7cad43f5b2688ab490de7c4d3f6f15e0b819cb588c4b771567c9729eb"}, -] - -[package.dependencies] -packaging = ">=23.1" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "importlib-metadata (>=6.6)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "setuptools (>=67.8)", "wheel (>=0.40)"] - -[[package]] -name = "pyquery" -version = "2.0.0" -description = "A jquery-like library for python" -optional = false -python-versions = "*" -files = [ - {file = "pyquery-2.0.0-py3-none-any.whl", hash = "sha256:8dfc9b4b7c5f877d619bbae74b1898d5743f6ca248cfd5d72b504dd614da312f"}, - {file = "pyquery-2.0.0.tar.gz", hash = "sha256:963e8d4e90262ff6d8dec072ea97285dc374a2f69cad7776f4082abcf6a1d8ae"}, -] - -[package.dependencies] -cssselect = ">=1.2.0" -lxml = ">=2.1" - -[package.extras] -test = ["pytest", "pytest-cov", "requests", "webob", "webtest"] - -[[package]] -name = "pyreadline" -version = "2.1" -description = "A python implmementation of GNU readline." -optional = false -python-versions = "*" -files = [ - {file = "pyreadline-2.1.zip", hash = "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"}, -] - -[[package]] -name = "pyrepl" -version = "0.9.0" -description = "A library for building flexible command line interfaces" -optional = false -python-versions = "*" -files = [ - {file = "pyrepl-0.9.0.tar.gz", hash = "sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"}, -] - -[[package]] -name = "pysocks" -version = "1.7.1" -description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, - {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, - {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, -] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.20.3" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-asyncio-0.20.3.tar.gz", hash = "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, - {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, -] - -[package.dependencies] -pytest = ">=6.1.0" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-base-url" -version = "2.0.0" -description = "pytest plugin for URL based testing" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "pytest-base-url-2.0.0.tar.gz", hash = "sha256:e1e88a4fd221941572ccdcf3bf6c051392d2f8b6cef3e0bc7da95abec4b5346e"}, - {file = "pytest_base_url-2.0.0-py3-none-any.whl", hash = "sha256:ed36fd632c32af9f1c08f2c2835dcf42ca8fcd097d6ed44a09f253d365ad8297"}, -] - -[package.dependencies] -pytest = ">=3.0.0,<8.0.0" -requests = ">=2.9" - -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-cover" -version = "3.0.0" -description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`." 
-optional = false -python-versions = "*" -files = [ - {file = "pytest-cover-3.0.0.tar.gz", hash = "sha256:5bdb6c1cc3dd75583bb7bc2c57f5e1034a1bfcb79d27c71aceb0b16af981dbf4"}, - {file = "pytest_cover-3.0.0-py2.py3-none-any.whl", hash = "sha256:578249955eb3b5f3991209df6e532bb770b647743b7392d3d97698dc02f39ebb"}, -] - -[package.dependencies] -pytest-cov = ">=2.0" - -[[package]] -name = "pytest-coverage" -version = "0.0" -description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`." -optional = false -python-versions = "*" -files = [ - {file = "pytest-coverage-0.0.tar.gz", hash = "sha256:db6af2cbd7e458c7c9fd2b4207cee75258243c8a81cad31a7ee8cfad5be93c05"}, - {file = "pytest_coverage-0.0-py2.py3-none-any.whl", hash = "sha256:dedd084c5e74d8e669355325916dc011539b190355021b037242514dee546368"}, -] - -[package.dependencies] -pytest-cover = "*" - -[[package]] -name = "pytest-django" -version = "4.5.2" -description = "A Django plugin for pytest." -optional = false -python-versions = ">=3.5" -files = [ - {file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"}, - {file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"}, -] - -[package.dependencies] -pytest = ">=5.4.0" - -[package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] -testing = ["Django", "django-configurations (>=2.0)"] - -[[package]] -name = "pytest-echo" -version = "1.7.1" -description = "pytest plugin with mechanisms for echoing environment variables, package version and generic attributes" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pytest-echo-1.7.1.tar.gz", hash = "sha256:3bf6a065ca624a5915bc2a820505a44735a8aa11865a57d8214d4eac18960191"}, -] - -[package.dependencies] -pytest = ">=2.2" - -[[package]] -name = "pytest-html" -version = "3.2.0" -description = "pytest plugin for generating HTML reports" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-html-3.2.0.tar.gz", hash = "sha256:c4e2f4bb0bffc437f51ad2174a8a3e71df81bbc2f6894604e604af18fbe687c3"}, - {file = "pytest_html-3.2.0-py3-none-any.whl", hash = "sha256:868c08564a68d8b2c26866f1e33178419bb35b1e127c33784a28622eb827f3f3"}, -] - -[package.dependencies] -py = ">=1.8.2" -pytest = ">=5.0,<6.0.0 || >6.0.0" -pytest-metadata = "*" - -[[package]] -name = "pytest-metadata" -version = "2.0.4" -description = "pytest plugin for test session metadata" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "pytest_metadata-2.0.4-py3-none-any.whl", hash = "sha256:acb739f89fabb3d798c099e9e0c035003062367a441910aaaf2281bc1972ee14"}, - {file = "pytest_metadata-2.0.4.tar.gz", hash = "sha256:fcc653f65fe3035b478820b5284fbf0f52803622ee3f60a2faed7a7d3ba1f41e"}, -] - -[package.dependencies] -pytest = ">=3.0.0,<8.0.0" - -[[package]] -name = "pytest-pythonpath" -version = "0.7.4" -description = "pytest plugin for adding to the PYTHONPATH from command line or configs." 
-optional = false -python-versions = ">=2.6, <4" -files = [ - {file = "pytest-pythonpath-0.7.4.tar.gz", hash = "sha256:64e195b23a8f8c0c631fb16882d9ad6fa4137ed1f2961ddd15d52065cd435db6"}, - {file = "pytest_pythonpath-0.7.4-py3-none-any.whl", hash = "sha256:e73e11dab2f0b83e73229e261242b251f0a369d7f527dbfec068822fd26a6ce5"}, -] - -[package.dependencies] -pytest = ">=2.5.2,<7" - -[[package]] -name = "pytest-selenium" -version = "2.0.1" -description = "pytest plugin for Selenium" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-selenium-2.0.1.tar.gz", hash = "sha256:a0008e6dce7c68501369c1c543420f5906ffada493d4ff0c5d9d5ccdf4022203"}, - {file = "pytest_selenium-2.0.1-py3-none-any.whl", hash = "sha256:fd632e0b657be6360f6319445eb0f475872d488b67634f791561851d55e390b1"}, -] - -[package.dependencies] -pytest = ">=5.0.0" -pytest-base-url = "*" -pytest-html = ">=1.14.0" -pytest-variables = ">=1.5.0" -requests = "*" -selenium = ">=3.0.0" -tenacity = ">=6,<7" - -[package.extras] -appium = ["appium-python-client (>=0.44)"] - -[[package]] -name = "pytest-variables" -version = "2.0.0" -description = "pytest plugin for providing variables to tests/fixtures" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "pytest-variables-2.0.0.tar.gz", hash = "sha256:1c9e4fc321e33be7d1b352ac9cf20fdd2c39a8e4e6fa2dcd042aaf70ed516be7"}, - {file = "pytest_variables-2.0.0-py3-none-any.whl", hash = "sha256:1a24a30b7acf9654d71bcdc8b10c1eb0d81b73b3eec72d810703c522475d643b"}, -] - -[package.dependencies] -pytest = ">=3.0.0,<8.0.0" - -[package.extras] -hjson = ["hjson"] -toml = ["toml"] -yaml = ["PyYAML"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python3-openid" -version = "3.2.0" -description = "OpenID support for modern servers and consumers." 
-optional = false -python-versions = "*" -files = [ - {file = "python3-openid-3.2.0.tar.gz", hash = "sha256:33fbf6928f401e0b790151ed2b5290b02545e8775f982485205a066f874aaeaf"}, - {file = "python3_openid-3.2.0-py3-none-any.whl", hash = "sha256:6626f771e0417486701e0b4daff762e7212e820ca5b29fcc0d05f6f8736dfa6b"}, -] - -[package.dependencies] -defusedxml = "*" - -[package.extras] -mysql = ["mysql-connector-python"] -postgresql = ["psycopg2"] - -[[package]] -name = "pytz" -version = "2023.3" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "qrcode" -version = "7.4.2" -description = "QR Code 
image generator" -optional = false -python-versions = ">=3.7" -files = [ - {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"}, - {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} -pypng = "*" -typing-extensions = "*" - -[package.extras] -all = ["pillow (>=9.1.0)", "pytest", "pytest-cov", "tox", "zest.releaser[recommended]"] -dev = ["pytest", "pytest-cov", "tox"] -maintainer = ["zest.releaser[recommended]"] -pil = ["pillow (>=9.1.0)"] -test = ["coverage", "pytest"] - -[[package]] -name = "redis" -version = "5.0.0" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -files = [ - {file = "redis-5.0.0-py3-none-any.whl", hash = "sha256:06570d0b2d84d46c21defc550afbaada381af82f5b83e5b3777600e05d8e2ed0"}, - {file = "redis-5.0.0.tar.gz", hash = "sha256:5cea6c0d335c9a7332a460ed8729ceabb4d0c489c7285b0a86dbbf8a017bd120"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "1.3.1" -description = "OAuthlib authentication support for Requests." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "selenium" -version = "4.11.2" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "selenium-4.11.2-py3-none-any.whl", hash = "sha256:98e72117b194b3fa9c69b48998f44bf7dd4152c7bd98544911a1753b9f03cc7d"}, - {file = "selenium-4.11.2.tar.gz", hash = "sha256:9f9a5ed586280a3594f7461eb1d9dab3eac9d91e28572f365e9b98d9d03e02b5"}, -] - -[package.dependencies] -certifi = ">=2021.10.8" -trio = ">=0.17,<1.0" -trio-websocket = ">=0.9,<1.0" -urllib3 = {version = ">=1.26,<3", extras = ["socks"]} - -[[package]] -name = "sentry-sdk" -version = "1.29.2" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = "*" -files = [ - {file = "sentry-sdk-1.29.2.tar.gz", hash = "sha256:a99ee105384788c3f228726a88baf515fe7b5f1d2d0f215a03d194369f158df7"}, - {file = "sentry_sdk-1.29.2-py2.py3-none-any.whl", hash = "sha256:3e17215d8006612e2df02b0e73115eb8376c37e3f586d8436fa41644e605074d"}, -] - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -arq = ["arq (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -loguru = ["loguru (>=0.5)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=5)"] - -[[package]] -name = "service-identity" -version = "23.1.0" -description = "Service identity verification for pyOpenSSL & cryptography." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "service_identity-23.1.0-py3-none-any.whl", hash = "sha256:87415a691d52fcad954a500cb81f424d0273f8e7e3ee7d766128f4575080f383"}, - {file = "service_identity-23.1.0.tar.gz", hash = "sha256:ecb33cd96307755041e978ab14f8b14e13b40f1fbd525a4dc78f46d2b986431d"}, -] - -[package.dependencies] -attrs = ">=19.1.0" -cryptography = "*" -pyasn1 = "*" -pyasn1-modules = "*" - -[package.extras] -dev = ["pyopenssl", "service-identity[docs,idna,mypy,tests]"] -docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] -idna = ["idna"] -mypy = ["idna", "mypy", "types-pyopenssl"] -tests = ["coverage[toml] (>=5.0.2)", "pytest"] - -[[package]] -name = "setuptools" -version = "65.7.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"}, - {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] - -[[package]] -name = "social-auth-app-django" -version = "5.2.0" -description = "Python Social Authentication, Django integration." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "social-auth-app-django-5.2.0.tar.gz", hash = "sha256:4a5dae406f3874b4003708ff120c02cb1a4c8eeead56cd163646347309fcd0f8"}, - {file = "social_auth_app_django-5.2.0-py3-none-any.whl", hash = "sha256:0347ca4cd23ea9d15a665da9d22950552fb66b95600e6c2ebae38ca883b3a4ed"}, -] - -[package.dependencies] -Django = ">=3.2" -social-auth-core = ">=4.4.1" - -[[package]] -name = "social-auth-core" -version = "4.4.2" -description = "Python social authentication made simple." -optional = false -python-versions = ">=3.6" -files = [ - {file = "social-auth-core-4.4.2.tar.gz", hash = "sha256:9791d7c7aee2ac8517fe7a2ea2f942a8a5492b3a4ccb44a9b0dacc87d182f2aa"}, - {file = "social_auth_core-4.4.2-py3-none-any.whl", hash = "sha256:ea7a19c46b791b767e95f467881b53c5fd0d1efb40048d9ed3dbc46daa05c954"}, -] - -[package.dependencies] -cryptography = ">=1.4" -defusedxml = ">=0.5.0rc1" -oauthlib = ">=1.0.3" -PyJWT = ">=2.0.0" -python3-openid = ">=3.0.10" -requests = ">=2.9.1" -requests-oauthlib = ">=0.6.1" - -[package.extras] -all = ["cryptography (>=2.1.1)", "python-jose (>=3.0.0)", "python3-saml (>=1.5.0)"] -allpy3 = ["cryptography (>=2.1.1)", "python-jose (>=3.0.0)", "python3-saml (>=1.5.0)"] -azuread = ["cryptography (>=2.1.1)"] -openidconnect = ["python-jose (>=3.0.0)"] -saml = ["python3-saml (>=1.5.0)"] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - -[[package]] -name = "soupsieve" -version = "2.4.1" -description = "A modern CSS selector implementation for Beautiful Soup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, -] - -[[package]] -name = "sqlparse" -version = "0.4.4" -description = "A non-validating SQL parser." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, - {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, -] - -[package.extras] -dev = ["build", "flake8"] -doc = ["sphinx"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "stack-data" -version = "0.6.2" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "tablib" -version = "3.5.0" -description = "Format agnostic tabular data library (XLS, JSON, YAML, CSV, etc.)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tablib-3.5.0-py3-none-any.whl", hash = "sha256:9821caa9eca6062ff7299fa645e737aecff982e6b2b42046928a6413c8dabfd9"}, - {file = "tablib-3.5.0.tar.gz", hash = "sha256:f6661dfc45e1d4f51fa8a6239f9c8349380859a5bfaa73280645f046d6c96e33"}, -] - -[package.dependencies] -markuppy = {version = "*", optional = true, markers = "extra == \"html\""} -odfpy = {version = "*", optional = true, markers = "extra == \"ods\""} -openpyxl = {version = ">=2.6.0", optional = true, markers = "extra == \"xlsx\""} -pyyaml = {version = "*", optional = true, markers = "extra == \"yaml\""} -xlrd = {version = "*", optional = true, markers = "extra == \"xls\""} -xlwt = {version = "*", optional = true, markers = "extra == \"xls\""} - -[package.extras] -all = ["markuppy", "odfpy", "openpyxl (>=2.6.0)", "pandas", "pyyaml", "tabulate", "xlrd", "xlwt"] -cli = ["tabulate"] -html = ["markuppy"] -ods = ["odfpy"] -pandas = ["pandas"] -xls = ["xlrd", "xlwt"] -xlsx = ["openpyxl (>=2.6.0)"] -yaml = ["pyyaml"] - -[[package]] -name = "tenacity" -version = "6.3.1" -description = "Retry code until it succeeds" -optional = false -python-versions = "*" -files = [ - {file = "tenacity-6.3.1-py2.py3-none-any.whl", hash = "sha256:baed357d9f35ec64264d8a4bbf004c35058fad8795c5b0d8a7dc77ecdcbb8f39"}, - {file = "tenacity-6.3.1.tar.gz", hash = "sha256:e14d191fb0a309b563904bbc336582efe2037de437e543b38da749769b544d7f"}, -] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -doc = ["reno", "sphinx", "tornado (>=4.5)"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tox" -version = "4.8.0" -description = "tox is a generic virtualenv management and test command line tool" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tox-4.8.0-py3-none-any.whl", hash = "sha256:4991305a56983d750a0d848a34242be290452aa88d248f1bf976e4036ee8b213"}, - {file = "tox-4.8.0.tar.gz", hash = "sha256:2adacf435b12ccf10b9dfa9975d8ec0afd7cbae44d300463140d2117b968037b"}, -] - -[package.dependencies] -cachetools = ">=5.3.1" -chardet = ">=5.1" -colorama = ">=0.4.6" -filelock = ">=3.12.2" -packaging = ">=23.1" -platformdirs = ">=3.9.1" -pluggy = ">=1.2" -pyproject-api = ">=1.5.3" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.24.1" - -[package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.23.3,!=1.23.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=0.3.1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.17.1)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.10)", "wheel (>=0.40)"] - -[[package]] -name = "traitlets" -version = "5.9.0" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.7" -files = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] - -[[package]] -name = "trio" -version = "0.22.2" -description = "A friendly Python library for async concurrency and I/O" -optional = false -python-versions = ">=3.7" -files = [ - {file = "trio-0.22.2-py3-none-any.whl", hash = "sha256:f43da357620e5872b3d940a2e3589aa251fd3f881b65a608d742e00809b1ec38"}, - {file = "trio-0.22.2.tar.gz", hash = "sha256:3887cf18c8bcc894433420305468388dac76932e9668afa1c49aa3806b6accb3"}, -] - -[package.dependencies] -attrs = ">=20.1.0" -cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} -exceptiongroup = {version = ">=1.0.0rc9", markers = "python_version < \"3.11\""} -idna = "*" -outcome = "*" -sniffio = "*" -sortedcontainers = "*" - -[[package]] -name = "trio-websocket" -version = "0.10.3" -description = "WebSocket library for Trio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "trio-websocket-0.10.3.tar.gz", hash = "sha256:1a748604ad906a7dcab9a43c6eb5681e37de4793ba0847ef0bc9486933ed027b"}, - {file = "trio_websocket-0.10.3-py3-none-any.whl", hash = "sha256:a9937d48e8132ebf833019efde2a52ca82d223a30a7ea3e8d60a7d28f75a4e3a"}, -] - -[package.dependencies] -exceptiongroup = "*" -trio = ">=0.11" -wsproto = ">=0.14" - -[[package]] -name = "twisted" -version = "22.10.0" -description = "An asynchronous networking framework written in Python" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "Twisted-22.10.0-py3-none-any.whl", hash = 
"sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"}, - {file = "Twisted-22.10.0.tar.gz", hash = "sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -Automat = ">=0.8.0" -constantly = ">=15.1" -hyperlink = ">=17.1.1" -idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""} -incremental = ">=21.3.0" -pyopenssl = {version = ">=21.0.0", optional = true, markers = "extra == \"tls\""} -service-identity = {version = ">=18.1.0", optional = true, markers = "extra == \"tls\""} -twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} -typing-extensions = ">=3.6.5" -"zope.interface" = ">=4.4.2" - -[package.extras] -all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] -conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] -contextvars = ["contextvars (>=2.4,<3)"] -dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)"] -dev-release = ["pydoctor (>=22.9.0,<22.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)"] -gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] -osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", 
"cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] -test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.0,<7.0)"] -tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] - -[[package]] -name = "twisted-iocpsupport" -version = "1.0.4" -description = "An extension for use in the twisted I/O Completion Ports reactor." -optional = false -python-versions = "*" -files = [ - {file = "twisted-iocpsupport-1.0.4.tar.gz", hash = "sha256:858096c0d15e33f15ac157f455d8f86f2f2cdd223963e58c0f682a3af8362d89"}, - {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win32.whl", hash = "sha256:afa2b630797f9ed2f27f3d9f55e3f72b4244911e45a8c82756f44babbf0b243e"}, - {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:0058c963c8957bcd3deda62122e89953c9de1e867a274facc9b15dde1a9f31e8"}, - {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win32.whl", hash = "sha256:196f7c7ccad4ba4d1783b1c4e1d1b22d93c04275cd780bf7498d16c77319ad6e"}, - {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:4e5f97bcbabdd79cbaa969b63439b89801ea560f11d42b0a387634275c633623"}, - {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win32.whl", hash = "sha256:6081bd7c2f4fcf9b383dcdb3b3385d75a26a7c9d2be25b6950c3d8ea652d2d2d"}, - {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:76f7e67cec1f1d097d1f4ed7de41be3d74546e1a4ede0c7d56e775c4dce5dfb0"}, - {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:3d306fc4d88a6bcf61ce9d572c738b918578121bfd72891625fab314549024b5"}, - {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:391ac4d6002a80e15f35adc4ad6056f4fe1c17ceb0d1f98ba01b0f4f917adfd7"}, - {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:0c1b5cf37f0b2d96cc3c9bc86fff16613b9f5d0ca565c96cf1f1fb8cfca4b81c"}, - {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:3c5dc11d72519e55f727320e3cee535feedfaee09c0f0765ed1ca7badff1ab3c"}, - {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win32.whl", hash = "sha256:cc86c2ef598c15d824a243c2541c29459881c67fc3c0adb6efe2242f8f0ec3af"}, - {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c27985e949b9b1a1fb4c20c71d315c10ea0f93fdf3ccdd4a8c158b5926edd8c8"}, - {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win32.whl", hash = "sha256:e311dfcb470696e3c077249615893cada598e62fa7c4e4ca090167bd2b7d331f"}, - {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4574eef1f3bb81501fb02f911298af3c02fe8179c31a33b361dd49180c3e644d"}, - {file = "twisted_iocpsupport-1.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:872747a3b64e2909aee59c803ccd0bceb9b75bf27915520ebd32d69687040fa2"}, - {file = "twisted_iocpsupport-1.0.4-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:c2712b778bacf1db434e3e065adfed3db300754186a29aecac1efae9ef4bcaff"}, - {file = "twisted_iocpsupport-1.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c66fa0aa4236b27b3c61cb488662d85dae746a6d1c7b0d91cf7aae118445adf"}, - {file = "twisted_iocpsupport-1.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:300437af17396a945a58dcfffd77863303a8b6d9e65c6e81f1d2eed55b50d444"}, -] - -[[package]] -name = "txaio" -version = "23.1.1" -description = "Compatibility API between asyncio/Twisted/Trollius" -optional = false -python-versions = ">=3.7" -files = [ - {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, - {file = "txaio-23.1.1.tar.gz", hash = "sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, -] - -[package.extras] -all = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] -dev = ["pep8 (>=1.6.2)", "pyenchant (>=1.6.6)", "pytest (>=2.6.4)", "pytest-cov (>=1.8.1)", "sphinx (>=1.2.3)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-spelling (>=2.1.2)", "tox (>=2.1.1)", "tox-gh-actions (>=2.2.0)", "twine (>=1.6.5)", "wheel"] -twisted = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] - -[[package]] -name = "types-pytz" -version = "2023.3.0.1" -description = "Typing stubs for pytz" -optional = false -python-versions = "*" -files = [ - {file = "types-pytz-2023.3.0.1.tar.gz", hash = "sha256:1a7b8d4aac70981cfa24478a41eadfcd96a087c986d6f150d77e3ceb3c2bdfab"}, - {file = "types_pytz-2023.3.0.1-py3-none-any.whl", hash = "sha256:65152e872137926bb67a8fe6cc9cfd794365df86650c5d5fdc7b167b0f38892e"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.11" -description = "Typing stubs for PyYAML" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, - {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, -] - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "tzdata" -version = "2023.3" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, -] - -[[package]] -name = "urllib3" -version = "2.0.4" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, -] - -[package.dependencies] -pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""} - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "vine" -version = "5.0.0" -description = "Promises, promises, promises." -optional = false -python-versions = ">=3.6" -files = [ - {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, - {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, -] - -[[package]] -name = "virtualenv" -version = "20.24.3" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"}, - {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" - -[package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "waitress" -version = "2.1.2" -description = "Waitress WSGI server" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "waitress-2.1.2-py3-none-any.whl", hash = "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a"}, - {file = "waitress-2.1.2.tar.gz", hash = "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"}, -] - -[package.extras] -docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] -testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] - -[[package]] -name = "watchdog" -version = "2.3.1" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.6" -files = [ - {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1f1200d4ec53b88bf04ab636f9133cb703eb19768a39351cee649de21a33697"}, - {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:564e7739abd4bd348aeafbf71cc006b6c0ccda3160c7053c4a53b67d14091d42"}, - {file = "watchdog-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:95ad708a9454050a46f741ba5e2f3468655ea22da1114e4c40b8cbdaca572565"}, - {file = "watchdog-2.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a073c91a6ef0dda488087669586768195c3080c66866144880f03445ca23ef16"}, - {file = "watchdog-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:aa8b028750b43e80eea9946d01925168eeadb488dfdef1d82be4b1e28067f375"}, - {file = "watchdog-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964fd236cd443933268ae49b59706569c8b741073dbfd7ca705492bae9d39aab"}, - {file = "watchdog-2.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:91fd146d723392b3e6eb1ac21f122fcce149a194a2ba0a82c5e4d0ee29cd954c"}, - {file = "watchdog-2.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efe3252137392a471a2174d721e1037a0e6a5da7beb72a021e662b7000a9903f"}, - {file = "watchdog-2.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:85bf2263290591b7c5fa01140601b64c831be88084de41efbcba6ea289874f44"}, - {file = "watchdog-2.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f2df370cd8e4e18499dd0bfdef476431bcc396108b97195d9448d90924e3131"}, - {file = "watchdog-2.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ea5d86d1bcf4a9d24610aa2f6f25492f441960cf04aed2bd9a97db439b643a7b"}, - {file = "watchdog-2.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6f5d0f7eac86807275eba40b577c671b306f6f335ba63a5c5a348da151aba0fc"}, - {file = "watchdog-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b848c71ef2b15d0ef02f69da8cc120d335cec0ed82a3fa7779e27a5a8527225"}, - {file = "watchdog-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0d9878be36d2b9271e3abaa6f4f051b363ff54dbbe7e7df1af3c920e4311ee43"}, - {file = "watchdog-2.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cd61f98cb37143206818cb1786d2438626aa78d682a8f2ecee239055a9771d5"}, - {file = "watchdog-2.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3d2dbcf1acd96e7a9c9aefed201c47c8e311075105d94ce5e899f118155709fd"}, - {file = "watchdog-2.3.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03f342a9432fe08107defbe8e405a2cb922c5d00c4c6c168c68b633c64ce6190"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7a596f9415a378d0339681efc08d2249e48975daae391d58f2e22a3673b977cf"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:0e1dd6d449267cc7d6935d7fe27ee0426af6ee16578eed93bacb1be9ff824d2d"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_i686.whl", hash = "sha256:7a1876f660e32027a1a46f8a0fa5747ad4fcf86cb451860eae61a26e102c8c79"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:2caf77ae137935c1466f8cefd4a3aec7017b6969f425d086e6a528241cba7256"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:53f3e95081280898d9e4fc51c5c69017715929e4eea1ab45801d5e903dd518ad"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:9da7acb9af7e4a272089bd2af0171d23e0d6271385c51d4d9bde91fe918c53ed"}, - {file = "watchdog-2.3.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8a4d484e846dcd75e96b96d80d80445302621be40e293bfdf34a631cab3b33dc"}, - {file = "watchdog-2.3.1-py3-none-win32.whl", hash = "sha256:a74155398434937ac2780fd257c045954de5b11b5c52fc844e2199ce3eecf4cf"}, - {file = "watchdog-2.3.1-py3-none-win_amd64.whl", hash = "sha256:5defe4f0918a2a1a4afbe4dbb967f743ac3a93d546ea4674567806375b024adb"}, - {file = "watchdog-2.3.1-py3-none-win_ia64.whl", hash = "sha256:4109cccf214b7e3462e8403ab1e5b17b302ecce6c103eb2fc3afa534a7f27b96"}, - {file = "watchdog-2.3.1.tar.gz", hash = "sha256:d9f9ed26ed22a9d331820a8432c3680707ea8b54121ddcc9dc7d9f2ceeb36906"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.6" -description = "Measures the displayed width of unicode strings in a terminal" 
-optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, -] - -[[package]] -name = "webob" -version = "1.8.7" -description = "WSGI request and response object" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" -files = [ - {file = "WebOb-1.8.7-py2.py3-none-any.whl", hash = "sha256:73aae30359291c14fa3b956f8b5ca31960e420c28c1bec002547fb04928cf89b"}, - {file = "WebOb-1.8.7.tar.gz", hash = "sha256:b64ef5141be559cfade448f044fa45c2260351edcb6a8ef6b7e00c7dcef0c323"}, -] - -[package.extras] -docs = ["Sphinx (>=1.7.5)", "pylons-sphinx-themes"] -testing = ["coverage", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "webtest" -version = "3.0.0" -description = "Helper to test WSGI applications" -optional = false -python-versions = ">=3.6, <4" -files = [ - {file = "WebTest-3.0.0-py3-none-any.whl", hash = "sha256:2a001a9efa40d2a7e5d9cd8d1527c75f41814eb6afce2c3d207402547b1e5ead"}, - {file = "WebTest-3.0.0.tar.gz", hash = "sha256:54bd969725838d9861a9fa27f8d971f79d275d94ae255f5c501f53bb6d9929eb"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -waitress = ">=0.8.5" -WebOb = ">=1.2" - -[package.extras] -docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.8)"] -tests = ["PasteDeploy", "WSGIProxy2", "coverage", "pyquery", "pytest", "pytest-cov"] - -[[package]] -name = "wmctrl" -version = "0.4" -description = "A tool to programmatically control windows inside X" -optional = false -python-versions = "*" -files = [ - {file = "wmctrl-0.4.tar.gz", hash = "sha256:66cbff72b0ca06a22ec3883ac3a4d7c41078bdae4fb7310f52951769b10e14e0"}, -] - -[[package]] -name = "wsproto" -version = "1.2.0" -description = "WebSockets state-machine based protocol implementation" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, - {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, -] - -[package.dependencies] -h11 = ">=0.9.0,<1" - -[[package]] -name = "xlrd" -version = "2.0.1" -description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"}, - {file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"}, -] - -[package.extras] -build = ["twine", "wheel"] -docs = ["sphinx"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "xlwt" -version = "1.3.0" -description = "Library to create spreadsheet files compatible with MS Excel 97/2000/XP/2003 XLS files, on any platform, with Python 2.6, 2.7, 3.3+" -optional = false -python-versions = "*" -files = [ - {file = "xlwt-1.3.0-py2.py3-none-any.whl", hash = "sha256:a082260524678ba48a297d922cc385f58278b8aa68741596a87de01a9c628b2e"}, - {file = "xlwt-1.3.0.tar.gz", hash = "sha256:c59912717a9b28f1a3c2a98fd60741014b06b043936dcecbc113eaaada156c88"}, -] - -[[package]] -name = "zipp" -version = "3.16.2" -description = "Backport of pathlib-compatible object wrapper for zip files" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[[package]] -name = "zope-interface" -version = "6.0" -description = "Interfaces for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, - {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, - {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, - {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, - {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, - {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, - {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, - {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, - {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, - {file = 
"zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, - {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, - {file = "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, - {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, - {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, - {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, - {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, - {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, - {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, - {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, - {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, - {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, -] - -[package.dependencies] -setuptools = "*" - -[package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "d655f0001c62e3cfdeb13a37b6a4c364f560edaca46074298135439b30e2b7be" diff --git a/pyproject.toml b/pyproject.toml index 8b8f8334..f101bac7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,117 +1,162 @@ -[tool.poetry] -name = "aurora" -version = "0.1.0" +[project] +name = "Aurora" +version = "2.0" description = "" -authors = ["sax "] -packages = [ - { include = "aurora", from = "src" }, +readme = "README.md" +license = {text = "MIT"} + +authors = [ + {name = "sax", email = "s.apostolico@gmail.com"}, + {name = "Domenico DiNicola", email = "dom.dinicola@gmail.com"}, +] +requires-python = ">=3.12" +dependencies = [ + "Faker", + "Markdown", + "Pillow", + "beautifulsoup4", + "celery", + "channels-redis", + 
"channels[daphne]", + "cryptography", + "django-admin-extra-buttons", + "django-admin-ordering", + "django-admin-sync", + "django-adminactions", + "django-adminfilters", + "django-anymail[mailjet]", + "django-appconf", + "django-bitfield", + "django-click", + "django-concurrency", + "django-constance", + "django-cors-headers", + "django-csp", + "django-debug-toolbar", + "django-environ", + "django-filter", + "django-flags", + "django-front-door", + "django-hijack", + "django-import-export", + "django-jsoneditor", + "django-mdeditor", + "django-mptt", + "django-picklefield", + "django-pwa", + "django-redis", + "django-regex", + "django-reversion", + "django-reversion-compare", + "django-simple-captcha", + "django-simple-math-captcha", + "django-smart-admin", + "django-smart-env", + "django-strategy-field", + "django-sysinfo", + "django-tinymce", + "django<5.1", + "djangorestframework", + "djangorestframework-datatables", + "htmlmin", + "jmespath", + "jsonpickle", + "mini-racer", + "natural-keys", + "psycopg2-binary", + "py-mini-racer", + "pycryptodome", + "pygments", + "pyquery", + "qrcode", + "sentry-sdk", + "setuptools>=75.6.0", + "social-auth-app-django", + "soupsieve", + "sqlparse", + "uwsgi", ] -[tool.poetry.dependencies] -Faker = "^14.0.0" -Markdown = "^3.3.6" -Pillow = "^9.0.1" -celery = "^5.2.6" -cryptography = "*" -django = "^3" -django-admin-extra-buttons = ">=1.5.6" -django-admin-ordering = "*" -django-admin-sync = ">=0.7.1" -django-adminactions = "^2" -django-adminfilters = "^2" -django-appconf = "^1.0.5" -django-bitfield = "^2.1" -django-click = "^2.3" -django-concurrency = "^2.4" -django-constance = "^2.8" -django-cors-headers = "^3.11.0" -django-csp = "^3.7" -django-environ = "^0.9" -django-flags = "^5" -django-import-export = "*" -django-jsoneditor = "^0.2" -django-picklefield = "^3.0" -django-redis = "^5.2.0" -django-regex = "^0.5" -django-reversion = "^5.0" -django-reversion-compare = "^0.15" -django-simple-captcha = "^0.5.17" -django-simple-math-captcha = "^2" -django-smart-admin = ">=2" -django-strategy-field = "^3" -django-sysinfo = "^2" -django_regex = "*" -djangorestframework = "^3" -htmlmin = "^0.1" -jmespath = "^1.0" -jsonpickle="^2.1" -natural-keys = "^2.0.0" -psycopg2-binary = "^2.9.3" -py-mini-racer = "^0.6.0" -pycryptodome = "^3.14.1" -#pyduktape2 = "^0.4.3" -python = "^3.9" -qrcode = "^7.3.1" -sentry-sdk = "^1.5" -setuptools = "^65.5.0" -social-auth-app-django = "^5.0.0" -sqlparse = "^0.4.2" -django-tinymce = "^3.5.0" -django-pwa = "^1.0.10" -django-mptt = "^0.14.0" -django-hijack = "^3.2.6" -channels = {extras = ["daphne"], version = "^4.0.0"} -channels-redis = "^4.0.0" -django-mdeditor = "^0.1.20" -djangorestframework-datatables = "^0.7.0" -django-filter = "^22.1" -django-front-door = "^0.10.0" -django-debug-toolbar = "^3" -pygments = "^2.14.0" -soupsieve = "^2.4" -pyquery = "^2.0.0" -beautifulsoup4 = "^4.12.0" +[uv] +package = true -[tool.poetry.dev-dependencies] -black = "^22.1.0" -coverage = "*" -django-webtest = "^1.9.7" -factory_boy = "*" -flake8 = "*" -freezegun = "^1.2.2" -ipython = "*" -isort = "^5.6.4" -pdbpp = "*" -pre-commit = "*" -pyflakes = "*" -pytest = "*" -pytest-asyncio = "*" -pytest-coverage = "^0.0" -pytest-django = "^4.1.0" -pytest-echo = "*" -pytest-pythonpath = "^0.7.3" -pytest-selenium = "^2.0" -tox = "*" -watchdog = "^2.1.6" -django-stubs = {extras = ["compatible-mypy"], version = "^1.16.0"} +[project.optional-dependencies] +docs = [ + "mkdocs", + "mkdocs-material", + "mkdocs-awesome-pages-plugin", + "mkdocstrings-python", + 
"mkdocs-gen-files", +] + +[tool.uv] +dev-dependencies = [ + "black", + "coverage", + "django-stubs[compatible-mypy]", + "django-webtest", + "factory-boy", + "flake8", + "flake8-html", + "freezegun", + "ipython", + "isort", + "pdbpp", + "pre-commit", + "pyflakes", + "pytest-asyncio", + "pytest-coverage", + "pytest-django", + "pytest-echo", + "pytest-html", + "pytest-pythonpath", + "pytest-selenium", + "pytest-variables", + "pytest-xdist", + "pytest", + "tox", + "watchdog", +] [build-system] -requires = ["poetry-core>=1.1.4"] -build-backend = "poetry.core.masonry.api" +requires = ["setuptools"] +build-backend = "setuptools.build_meta" -[tool.isort] -profile = "black" +[tool.setuptools.package-dir] +"" = "src" + +[tool.setuptools] +packages = ["aurora", "dbtemplates"] [tool.black] line-length = 120 -target-version = ['py39'] include = '\.pyi?$' exclude = ''' /( - \.toml - |\.sh - |\.git - |\.ini - |Dockerfile + \.git + | \.pytest_cache + | \.tox + | \.venv + | ~build + | build + | ops + | migrations )/ ''' + +[tool.isort] +profile = "black" +line_length = 120 +default_section = "THIRDPARTY" +known_first_party = [] +known_django = "django" +sections = ["FUTURE","STDLIB","DJANGO","THIRDPARTY","FIRSTPARTY","LOCALFOLDER"] +include_trailing_comma = true +skip = ["migrations", "snapshots", ".venv"] + + +[tool.nitpick] + style = [ + "github://unicef/hope-code-conventions@main/django/django.toml" + ] + cache = "1 day" diff --git a/pytest.ini b/pytest.ini index 343cc04b..efb5ff45 100644 --- a/pytest.ini +++ b/pytest.ini @@ -4,7 +4,7 @@ django_find_project = true DJANGO_SETTINGS_MODULE = aurora.config.settings log_format = %(asctime)s %(levelname)s %(message)s log_level = CRITICAL -python_paths = tests/extras src/ +pythonpath = tests/extras src/ log_date_format = %Y-%m-%d %H:%M:%S addopts = --cov=aurora @@ -12,6 +12,7 @@ addopts = --cov-report=xml --cov-config=./tests/.coveragerc --reuse-db + --maxfail=20 --tb=short --capture=no --echo-version django @@ -26,5 +27,3 @@ markers = python_files=test_*.py filterwarnings = ignore::DeprecationWarning - ignore::django.utils.deprecation.RemovedInDjango40Warning - ignore::django.utils.deprecation.RemovedInDjango41Warning diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 00000000..941d1975 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,91 @@ +target-version = "py313" +line-length = 120 + +[lint.isort] +case-sensitive = true + +[lint] +select = [ + "A", # prevent using keywords that clobber python builtins +# "ANN", # flake8 annotations + "B", # bugbear: security warnings + "BLE", # blind exceptions + "C4", # flake8-comprehensions + "C90", # McCabe complexity + "COM", # flake8-commas + "D", # pydocstyle + "DJ", # flake8-django + "E", # pycodestylex + "E4", "E7", "E9", + "ERA", # eradicate + "F", # pyflakes + "FLY", # flynt + "FURB", # refurb + "I", # isort + "ICN", # flake8-import-conventions + "ISC", # implicit string concatenation + "N", # Pep* naming + "PERF", # perflint + "PIE", # flake8-pie + "PL", # PyLint + "PT", # flake8-pytest-style + "Q", # flake8-quotes + "R", # PyLint Refactor + "RET", # flake8-return + "S", # bandit, + "SIM", # flake8-simplify + "T10", # flake8-debugger + "T20", # flake8-print + "TC", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle warnings + "YTT", # flake8 2020 +] +extend-select = ["UP", ] +ignore = [ + "ANN401", + "B904", # raise-without-from-inside-except: syntax not compatible with py2 + "COM812", + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D102", # Missing 
docstring in public method + "D103", # Missing docstring in public function + "D104", # Missing docstring in public package + "D105", # Missing docstring in magic method + "D106", # Missing docstring in public nested class + "D107", # Missing docstring in `__init__` + "D203", # one-blank-line-before-class + "D212", # multi-line-summary-first-line + "D213", # multi-line-summary-second-line + "E731", # lambda-assignment: lambdas are substantial in maintenance of py2/3 codebase + "ISC001", # conflicts with ruff format command + "RUF005", # collection-literal-concatenation: syntax not compatible with py2 + "RUF012", # mutable-class-default: typing is not available for py2 + "I001", # unsorted imports https://docs.astral.sh/ruff/rules/unsorted-imports/#unsorted-imports-i001 + "UP037", # [*] Remove quotes from type annotation + "UP035", # Import from `collections.abc` instead: `Sequence` + "UP031", # Use format specifiers instead of percent format + "SIM108", # Use ternary operator instead of... + "PLR2004", # Magic value used in comparison + "DJ001", # Avoid using `null=True` on string-based fields such as `CharField` + + # todos + "BLE001", # blind exception + "S324", # insecure md5 + "C901", # too complex + "PLR0912", + "PLR0913", + "PLR0915", + "PLW0603" +] + +[format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +[lint.per-file-ignores] +"tests/**.py" = ["S101", "PLR2004", "S", "SIM117", "D", "UP", "PLR0913", "ANN", "N999"] +"src/dbtemplates/test**.py" = ["S101", "PLR2004", "S", "SIM117", "D", "UP", "PLR0913", "ANN", "N999", "PT009"] +"src/**/versioning/**.py" = ["N999", ] diff --git a/src/aurora/__init__.py b/src/aurora/__init__.py index 78cc22f5..2bec683e 100644 --- a/src/aurora/__init__.py +++ b/src/aurora/__init__.py @@ -1,31 +1 @@ -import os -from functools import lru_cache -from subprocess import STDOUT - -VERSION = os.environ.get("VERSION", "") - - -@lru_cache(1) -def get_full_version(git_commit=True): - commit = "" - if git_commit: - import subprocess - - try: - res = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], stderr=STDOUT) - commit = "-" + res.decode("utf8")[:-1] - except (subprocess.CalledProcessError, FileNotFoundError): # pragma: no-cover - pass - - return f"{VERSION}{commit}" - - -@lru_cache(1) -def get_git_status(clean="(nothing to commit)", dirty="(uncommitted changes)"): - import subprocess - - try: - uncommited = subprocess.check_output(["git", "status", "-s"], stderr=STDOUT) - return dirty if uncommited else clean - except (subprocess.CalledProcessError, FileNotFoundError): # pragma: no-cover - return "" +VERSION = __version__ = "2.0.0" diff --git a/src/aurora/administration/admin.py b/src/aurora/administration/admin.py deleted file mode 100644 index 8886a171..00000000 --- a/src/aurora/administration/admin.py +++ /dev/null @@ -1,44 +0,0 @@ -# from admin_extra_buttons.decorators import button -# from adminactions.helpers import AdminActionPermMixin -# from django.utils.translation import gettext_lazy as _ -# from hijack.templatetags.hijack import can_hijack -# from smart_admin.smart_auth.admin import GroupAdmin, UserAdmin -# -# from aurora.administration.hijack import impersonate -# from aurora.core.admin_sync import SyncMixin -# from aurora.core.utils import is_root -# -# -# class AuroraUserAdmin(AdminActionPermMixin, UserAdmin): -# fieldsets = ( -# (None, {"fields": ("username", "password")}), -# (_("Personal info"), {"fields": ("first_name", "last_name", "email")}), -# ( -# 
_("Permissions"), -# { -# "fields": ( -# "is_active", -# "is_staff", -# "is_superuser", -# "groups", -# ), -# }, -# ), -# (_("Important dates"), {"fields": ("last_login", "date_joined")}), -# ) -# -# @button(permission=lambda req, obj, **kw: is_root(req) and can_hijack(req.user, obj)) -# def hijack(self, request, pk): -# hijacked = self.get_object(request, pk) -# impersonate(request, hijacked) -# -# -# class AuroraUserAdminTemp(AuroraUserAdmin): -# list_display = ("username", "email", "first_name", "last_name", "is_staff", "is_superuser", "user") -# -# def get_list_display(self, request): -# return super().get_list_display(request) -# -# -# class AuroraGroupAdmin(AdminActionPermMixin, SyncMixin, GroupAdmin): -# pass diff --git a/src/aurora/administration/apps.py b/src/aurora/administration/apps.py index 72c948fb..2cfee88e 100644 --- a/src/aurora/administration/apps.py +++ b/src/aurora/administration/apps.py @@ -8,6 +8,7 @@ class AuroraAdminConfig(AppConfig): def ready(self): super().ready() from django.contrib.admin import site + from smart_admin.console import ( panel_email, panel_error_page, diff --git a/src/aurora/administration/filters.py b/src/aurora/administration/filters.py index ec305bf4..d1dc423d 100644 --- a/src/aurora/administration/filters.py +++ b/src/aurora/administration/filters.py @@ -1,10 +1,11 @@ -from adminfilters.autocomplete import get_real_field -from adminfilters.mixin import MediaDefinitionFilter, SmartFieldListFilter from django.conf import settings from django.contrib.admin.widgets import SELECT2_TRANSLATIONS from django.urls import reverse from django.utils.translation import get_language +from adminfilters.autocomplete import get_real_field +from adminfilters.mixin import MediaDefinitionFilter, SmartFieldListFilter + from aurora.core.version_media import VersionMedia @@ -76,6 +77,5 @@ def get_title(self): if not self.can_negate and self.negated: if self.negated_title: return self.negated_title - else: - return f"not {self.title}" + return f"not {self.title}" return self.filter_title or self.title diff --git a/src/aurora/administration/forms.py b/src/aurora/administration/forms.py index e4bf9942..dd18f9c8 100644 --- a/src/aurora/administration/forms.py +++ b/src/aurora/administration/forms.py @@ -1,18 +1,28 @@ import base64 import urllib.parse -import sqlparse from django import forms from django.core.exceptions import ValidationError +import sqlparse + class ImportForm(forms.Form): file = forms.FileField() class ExportForm(forms.Form): - APPS = ("core", "registration", "i18n", "constance", "counters", "flatpages", "security", "dbtemplates") - apps = forms.MultipleChoiceField(choices=zip(APPS, APPS), widget=forms.CheckboxSelectMultiple()) + APPS = ( + "core", + "registration", + "i18n", + "constance", + "counters", + "flatpages", + "security", + "dbtemplates", + ) + apps = forms.MultipleChoiceField(choices=zip(APPS, APPS, strict=True), widget=forms.CheckboxSelectMultiple()) class SQLForm(forms.Form): diff --git a/src/aurora/administration/hijack.py b/src/aurora/administration/hijack.py index c1848d42..ec936060 100644 --- a/src/aurora/administration/hijack.py +++ b/src/aurora/administration/hijack.py @@ -1,4 +1,5 @@ from django.contrib.auth import login + from hijack import signals from hijack.templatetags.hijack import can_hijack from hijack.views import get_used_backend, keep_session_age diff --git a/src/aurora/administration/mixin.py b/src/aurora/administration/mixin.py index 3c7cbc50..2a8a0cb3 100644 --- a/src/aurora/administration/mixin.py +++ 
b/src/aurora/administration/mixin.py @@ -3,13 +3,14 @@ import tempfile from pathlib import Path -from admin_extra_buttons.decorators import button -from admin_extra_buttons.mixins import ExtraButtonsMixin -from concurrency.api import disable_concurrency from django.contrib import messages from django.core.management import call_command from django.http import JsonResponse +from admin_extra_buttons.decorators import button +from admin_extra_buttons.mixins import ExtraButtonsMixin +from concurrency.api import disable_concurrency + from aurora.core.utils import render from .forms import ImportForm @@ -54,7 +55,11 @@ def loaddata(self, request): finally: fixture.unlink() except Exception as e: - self.message_user(request, f"{e.__class__.__name__}: {e} {out.getvalue()}", messages.ERROR) + self.message_user( + request, + f"{e.__class__.__name__}: {e} {out.getvalue()}", + messages.ERROR, + ) else: ctx["form"] = form else: diff --git a/src/aurora/administration/panels.py b/src/aurora/administration/panels.py index 85b60b12..d872548c 100644 --- a/src/aurora/administration/panels.py +++ b/src/aurora/administration/panels.py @@ -4,8 +4,6 @@ import tempfile from pathlib import Path -import sqlparse -from concurrency.api import disable_concurrency from django.contrib import messages from django.core.exceptions import PermissionDenied from django.core.management import call_command @@ -13,10 +11,18 @@ from django.http import JsonResponse from django.shortcuts import render +import sqlparse +from concurrency.api import disable_concurrency + from .. import VERSION from ..core.utils import is_root -from ..security.models import UserProfile + from .forms import ExportForm, ImportForm, SQLForm +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ..security.models import UserProfile + logger = logging.getLogger(__name__) @@ -67,7 +73,11 @@ def panel_loaddata(self, request): finally: fixture.unlink() except Exception as e: - messages.add_message(request, messages.ERROR, f"{e.__class__.__name__}: {e} {out.getvalue()}") + messages.add_message( + request, + messages.ERROR, + f"{e.__class__.__name__}: {e} {out.getvalue()}", + ) else: context["form"] = form @@ -142,7 +152,6 @@ def panel_sql(self, request, extra_context=None): if form.is_valid(): try: cmd = form.cleaned_data["command"] - # stm = urllib.parse.unquote(base64.b64decode(cmd).decode()) response["stm"] = sqlparse.format(cmd) if is_root(request): conn = connections[DEFAULT_DB_ALIAS] @@ -159,7 +168,6 @@ def panel_sql(self, request, extra_context=None): else: response["error"] = str(form.errors) return JsonResponse(response) - else: - form = SQLForm() + form = SQLForm() context["form"] = form return render(request, "admin/panels/sql.html", context) diff --git a/src/aurora/administration/templates/admin/_footer.html b/src/aurora/administration/templates/admin/_footer.html index 1b91ca92..d222438f 100644 --- a/src/aurora/administration/templates/admin/_footer.html +++ b/src/aurora/administration/templates/admin/_footer.html @@ -1,6 +1,6 @@ {% load static %}
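Note on the panels.py hunk above: the UserProfile import is now deferred behind typing.TYPE_CHECKING, so it is evaluated only by static type checkers and never at module import time. A minimal sketch of the pattern, using a hypothetical helper function and assuming only what the hunk itself shows:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # only type checkers evaluate this branch; no runtime import of the models module
        from aurora.security.models import UserProfile


    def profile_label(profile: "UserProfile") -> str:
        # hypothetical helper: the quoted annotation means no runtime import is required
        return str(profile)
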
- {{ project.build_date }} - {% include "i18n/_select_language.html" with languages=project.languages %} + Aurora {{ project.version }} - {{ project.build_date }} - {{ project.commit }} - {% include "i18n/_select_language.html" with languages=project.languages %} {% csrf_token %} - {% endblock %} + {% endblock script %} - {% endblock %} + {% endblock body %} diff --git a/src/aurora/api/urls.py b/src/aurora/api/urls.py index 42d4191d..b0f0d076 100644 --- a/src/aurora/api/urls.py +++ b/src/aurora/api/urls.py @@ -1,7 +1,7 @@ from django.urls import include, path -from .router import AuroraRouter from . import viewsets +from .router import AuroraRouter app_name = "api" @@ -11,15 +11,13 @@ router.register(r"flatpage", viewsets.FlatPageViewSet) router.register(r"form", viewsets.FlexFormViewSet) router.register(r"formset", viewsets.FormSetViewSet) +router.register(r"organization", viewsets.OrganizationViewSet) +router.register(r"project", viewsets.ProjectViewSet) +router.register(r"record", viewsets.RecordViewSet) router.register(r"registration", viewsets.RegistrationViewSet) router.register(r"template", viewsets.TemplateViewSet) -router.register(r"template", viewsets.TemplateViewSet) router.register(r"user", viewsets.UserViewSet) router.register(r"validator", viewsets.ValidatorViewSet) -router.register(r"project", viewsets.ProjectViewSet) -router.register(r"record", viewsets.RecordViewSet) -router.register(r"organization", viewsets.OrganizationViewSet) -router.register(r"validator", viewsets.ValidatorViewSet) urlpatterns = [ path("", include(router.urls)), diff --git a/src/aurora/api/viewsets/__init__.py b/src/aurora/api/viewsets/__init__.py index a077c370..594a19fc 100644 --- a/src/aurora/api/viewsets/__init__.py +++ b/src/aurora/api/viewsets/__init__.py @@ -1,13 +1,13 @@ -from .counter import CounterViewSet -from .field import FlexFormFieldViewSet -from .flatpage import FlatPageViewSet -from .form import FlexFormViewSet -from .formset import FormSetViewSet -from .org import OrganizationViewSet -from .project import ProjectViewSet -from .record import RecordViewSet -from .registration import RegistrationViewSet -from .sys import system_info -from .template import TemplateViewSet -from .user import UserViewSet -from .validator import ValidatorViewSet +from .counter import CounterViewSet # noqa +from .field import FlexFormFieldViewSet # noqa +from .flatpage import FlatPageViewSet # noqa +from .form import FlexFormViewSet # noqa +from .formset import FormSetViewSet # noqa +from .org import OrganizationViewSet # noqa +from .project import ProjectViewSet # noqa +from .record import RecordViewSet # noqa +from .registration import RegistrationViewSet # noqa +from .sys_info import system_info # noqa +from .template import TemplateViewSet # noqa +from .user import UserViewSet # noqa +from .validator import ValidatorViewSet # noqa diff --git a/src/aurora/api/viewsets/base.py b/src/aurora/api/viewsets/base.py index d9a30fd3..c6676eb0 100644 --- a/src/aurora/api/viewsets/base.py +++ b/src/aurora/api/viewsets/base.py @@ -25,7 +25,6 @@ class LastModifiedFilter(filters.FilterSet): modified_after = filters.DateFilter(label="Updated after", field_name="last_update_date", lookup_expr="gte") - # date_range = filters.DateFromToRangeFilter(widget=RangeWidget(attrs={'placeholder': 'YYYY/MM/DD'})) class IsRootUser(BasePermission): @@ -66,7 +65,11 @@ def filter_queryset(self, request, queryset, view): class SmartViewSet(viewsets.ReadOnlyModelViewSet): - authentication_classes = (SessionAuthentication, 
TokenAuthentication, BasicAuthentication) + authentication_classes = ( + SessionAuthentication, + TokenAuthentication, + BasicAuthentication, + ) permission_classes = (IsRootUser | AuroraPermission | DjangoModelPermissions,) filter_backends = [AuroraFilterBackend] filterset_class = LastModifiedFilter diff --git a/src/aurora/api/viewsets/counter.py b/src/aurora/api/viewsets/counter.py index d53c0014..85bad9e1 100644 --- a/src/aurora/api/viewsets/counter.py +++ b/src/aurora/api/viewsets/counter.py @@ -27,7 +27,10 @@ class CounterViewSet(SmartViewSet): serializer_class = CounterSerializer @action( - detail=False, permission_classes=[AllowAny], authentication_classes=[], throttle_classes=[ScopedRateThrottle2] + detail=False, + permission_classes=[AllowAny], + authentication_classes=[], + throttle_classes=[ScopedRateThrottle2], ) def refresh(self, request): Counter.objects.collect() @@ -40,9 +43,7 @@ def refresh(self, request): ) def throttled(self, request, wait): - """ - If request is throttled, determine what kind of exception to raise. - """ + """If request is throttled, determine what kind of exception to raise.""" latest = Counter.objects.latest() detail = "Request was throttled. Data updated to %s." % latest.day raise exceptions.Throttled(wait=wait, detail=detail) diff --git a/src/aurora/api/viewsets/field.py b/src/aurora/api/viewsets/field.py index 170a3abb..74e9275b 100644 --- a/src/aurora/api/viewsets/field.py +++ b/src/aurora/api/viewsets/field.py @@ -11,9 +11,7 @@ class Meta: class FlexFormFieldViewSet(SmartViewSet): - """ - This viewset automatically provides `list` and `retrieve` actions. - """ + """Viewset automatically provides `list` and `retrieve` actions.""" queryset = FlexFormField.objects.all() serializer_class = FlexFormFieldSerializer diff --git a/src/aurora/api/viewsets/formset.py b/src/aurora/api/viewsets/formset.py index 5cc1ee5c..6cb21c75 100644 --- a/src/aurora/api/viewsets/formset.py +++ b/src/aurora/api/viewsets/formset.py @@ -12,9 +12,7 @@ class Meta: class FormSetViewSet(SmartViewSet): - """ - This viewset automatically provides `list` and `retrieve` actions. 
- """ + """Viewset automatically provides `list` and `retrieve` actions.""" queryset = FormSet.objects.all() serializer_class = FormSetSerializer diff --git a/src/aurora/api/viewsets/org.py b/src/aurora/api/viewsets/org.py index 0c865920..3b1aa7e5 100644 --- a/src/aurora/api/viewsets/org.py +++ b/src/aurora/api/viewsets/org.py @@ -11,11 +11,8 @@ class OrganizationViewSet(SmartViewSet): @action(detail=True, methods=["GET"]) def projects(self, request, pk=None): - # item = Organization.objects.get(slug=slug) queryset = Project.objects.filter(organization__id=pk) page = self.paginate_queryset(queryset) serializer = ProjectSerializer(page, many=True, context={"request": request}) - response = self.get_paginated_response(serializer.data) - return response - # return Response(serializer.data, status=status.HTTP_200_OK) + return self.get_paginated_response(serializer.data) diff --git a/src/aurora/api/viewsets/project.py b/src/aurora/api/viewsets/project.py index 87e2de32..71e4623f 100644 --- a/src/aurora/api/viewsets/project.py +++ b/src/aurora/api/viewsets/project.py @@ -11,11 +11,9 @@ class ProjectViewSet(SmartViewSet): queryset = Project.objects.all() serializer_class = ProjectSerializer - # lookup_field = "slug" @action(detail=True, methods=["GET"]) def registrations(self, request, pk=None): - # item = Organization.objects.get(slug=slug) queryset = Registration.objects.filter(project__id=pk) serializer = RegistrationListSerializer(queryset, many=True, context={"request": request}) return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/src/aurora/api/viewsets/record.py b/src/aurora/api/viewsets/record.py index dc40dc01..2de61b90 100644 --- a/src/aurora/api/viewsets/record.py +++ b/src/aurora/api/viewsets/record.py @@ -14,7 +14,13 @@ class RecordFilter(filters.FilterSet): class Meta: model = Record - fields = ["registration", "after", "id"] + fields = [ + "registration", + "after", + "id", + "registration__project", + "registration__project__organization", + ] class RecordPaginator(CursorPagination): @@ -38,13 +44,3 @@ def metadata(self, request, pk=None): "registration": latest.registration_id, } ) - - # class Meta: - # datatables_extra_json = ("fields", ) - # - # - # def get_fields(self): - # return "fields", { - # "artist": [{'label': obj.name, 'value': obj.pk} for obj in Artist.objects.all()], - # "genre": [{'label': obj.name, 'value': obj.pk} for obj in Genre.objects.all()] - # } diff --git a/src/aurora/api/viewsets/registration.py b/src/aurora/api/viewsets/registration.py index b6d3751b..00100559 100644 --- a/src/aurora/api/viewsets/registration.py +++ b/src/aurora/api/viewsets/registration.py @@ -5,8 +5,9 @@ from collections import OrderedDict from urllib import parse -from django.http import HttpResponse, HttpRequest +from django.http import HttpRequest, HttpResponse from django.utils.cache import get_conditional_response + from django_filters import rest_framework as filters from django_filters.rest_framework import DjangoFilterBackend from rest_framework import status @@ -17,7 +18,7 @@ from rest_framework.renderers import JSONRenderer from rest_framework.response import Response -from ...core.utils import get_etag, get_session_id, build_dict +from ...core.utils import build_dict, get_etag, get_session_id from ...registration.models import Record, Registration from ..serializers import RegistrationDetailSerializer, RegistrationListSerializer from ..serializers.record import DataTableRecordSerializer @@ -56,25 +57,16 @@ def get_serializer_class(self): def 
get_permissions(self): return [permission() for permission in self.permission_classes] - # def get_object(self): - # queryset = self.filter_queryset(self.get_queryset()) - # if self.kwargs["attr"].isnumeric(): - # filter_field = "pk" - # else: - # filter_field = "slug" - # obj = get_object_or_404(queryset, **{filter_field: self.kwargs["attr"]}) - # - # # May raise a permission denied - # self.check_object_permissions(self.request, obj) - # - # return obj - @action(detail=True, permission_classes=[AllowAny]) def metadata(self, request, pk=None): reg: Registration = self.get_object() return Response(reg.metadata) - @action(detail=True, permission_classes=[AllowAny], url_path="((?P[a-z-]*)/)*version") + @action( + detail=True, + permission_classes=[AllowAny], + url_path="((?P[a-z-]*)/)*version", + ) def version1(self, request, pk, language=""): reg: Registration = self.get_object() return Response( @@ -122,21 +114,23 @@ def records(self, request, pk=None): if page is None: serializer = DataTableRecordSerializer( - queryset, many=True, context={"request": request}, metadata=obj.metadata + queryset, + many=True, + context={"request": request}, + metadata=obj.metadata, ) return Response(serializer.data, status=status.HTTP_200_OK) - else: - serializer = DataTableRecordSerializer( - page, many=True, context={"request": request}, metadata=obj.metadata - ) - response = self.get_paginated_response(serializer.data) + serializer = DataTableRecordSerializer(page, many=True, context={"request": request}, metadata=obj.metadata) + response = self.get_paginated_response(serializer.data) response.headers.setdefault("ETag", self.res_etag) response.headers.setdefault("Cache-Control", "private, max-age=120") return response @action(detail=True) def csv(self, request: HttpRequest, pk): - """ + r""" + Return a CSV json for registration information. 
+ "form": { "filters": "({}, {})", "include": "['']", @@ -161,9 +155,8 @@ def csv(self, request: HttpRequest, pk): } """ reg: Registration = self.get_object() + from aurora.core.forms import CSVOptionsForm, DateFormatsForm from aurora.registration.forms import RegistrationExportForm - from aurora.core.forms import CSVOptionsForm - from aurora.core.forms import DateFormatsForm try: form = RegistrationExportForm(request.GET, initial=RegistrationExportForm.defaults) @@ -171,7 +164,7 @@ def csv(self, request: HttpRequest, pk): fmt_form = DateFormatsForm(request.GET, prefix="fmt", initial=DateFormatsForm.defaults) if form.is_valid() and opts_form.is_valid() and fmt_form.is_valid(): for frm in [form, fmt_form, opts_form]: - for k, f in frm.defaults.items(): + for k in frm.defaults: if not frm.cleaned_data.get(k): frm.cleaned_data[k] = frm.defaults[k] filters, exclude = form.cleaned_data["filters"] @@ -194,7 +187,7 @@ def csv(self, request: HttpRequest, pk): all_fields = [] records = [build_dict(r, **fmt_form.cleaned_data) for r in qs] for r in records: - for field_name in r.keys(): + for field_name in r: if field_name not in skipped and field_name in exclude_fields: skipped.append(field_name) elif field_name not in all_fields and field_name in include_fields: @@ -229,41 +222,38 @@ def csv(self, request: HttpRequest, pk): headers=headers, content_type="text/plain", ) - else: - - return Response( - { - "reg": { - "name": reg.name, - "slug": reg.slug, - }, - "data": { - "download": request.build_absolute_uri( - "?download=1&" + parse.urlencode(request.GET.dict(), doseq=False) - ), - "preview": request.build_absolute_uri( - "?preview=1&" + parse.urlencode(request.GET.dict(), doseq=False) - ), - "count": qs.count(), - "filters": filters, - "exclude": exclude, - "include_fields": [r.pattern for r in include_fields], - "fieldnames": all_fields, - "skipped": skipped, - }, - "form": {k: str(v) for k, v in form.cleaned_data.items()}, - "fmt": {k: str(v) for k, v in fmt_form.cleaned_data.items()}, - "csv": {k: str(v) for k, v in opts_form.cleaned_data.items()}, - } - ) - else: return Response( { - "form": form.errors, - "fmt": fmt_form.errors, - "csv": opts_form.errors, + "reg": { + "name": reg.name, + "slug": reg.slug, + }, + "data": { + "download": request.build_absolute_uri( + "?download=1&" + parse.urlencode(request.GET.dict(), doseq=False) + ), + "preview": request.build_absolute_uri( + "?preview=1&" + parse.urlencode(request.GET.dict(), doseq=False) + ), + "count": qs.count(), + "filters": filters, + "exclude": exclude, + "include_fields": [r.pattern for r in include_fields], + "fieldnames": all_fields, + "skipped": skipped, + }, + "form": {k: str(v) for k, v in form.cleaned_data.items()}, + "fmt": {k: str(v) for k, v in fmt_form.cleaned_data.items()}, + "csv": {k: str(v) for k, v in opts_form.cleaned_data.items()}, } ) + return Response( + { + "form": form.errors, + "fmt": fmt_form.errors, + "csv": opts_form.errors, + } + ) except Exception as e: logger.exception(e) return Response({"message": "Error"}, status=500) diff --git a/src/aurora/api/viewsets/sys.py b/src/aurora/api/viewsets/sys_info.py similarity index 99% rename from src/aurora/api/viewsets/sys.py rename to src/aurora/api/viewsets/sys_info.py index 3b4981e3..9a731cb1 100644 --- a/src/aurora/api/viewsets/sys.py +++ b/src/aurora/api/viewsets/sys_info.py @@ -1,9 +1,10 @@ import os -from constance import config from django.conf import settings from django.http import JsonResponse +from constance import config + from aurora.core.utils import 
has_token diff --git a/src/aurora/api/viewsets/user.py b/src/aurora/api/viewsets/user.py index 6e2b8dd4..54317570 100644 --- a/src/aurora/api/viewsets/user.py +++ b/src/aurora/api/viewsets/user.py @@ -1,6 +1,7 @@ from django.contrib.auth import get_user_model from django.urls import reverse from django.utils.translation import get_language + from rest_framework import serializers from rest_framework.authentication import SessionAuthentication from rest_framework.decorators import action @@ -24,14 +25,16 @@ def get_permissions(self, obj): class UserViewSet(SmartViewSet): - """ - This viewset automatically provides `list` and `retrieve` actions. - """ + """Viewset automatically provides `list` and `retrieve` actions.""" queryset = User.objects.all() serializer_class = UserSerializer - @action(detail=False, permission_classes=[AllowAny], authentication_classes=[SessionAuthentication]) + @action( + detail=False, + permission_classes=[AllowAny], + authentication_classes=[SessionAuthentication], + ) def me(self, request): response = { "perms": [], diff --git a/src/aurora/api/viewsets/validator.py b/src/aurora/api/viewsets/validator.py index b953f59b..3bc51aeb 100644 --- a/src/aurora/api/viewsets/validator.py +++ b/src/aurora/api/viewsets/validator.py @@ -1,4 +1,5 @@ from django.http import HttpResponse + from rest_framework.authentication import SessionAuthentication from rest_framework.decorators import action from rest_framework.permissions import AllowAny @@ -18,7 +19,11 @@ class ValidatorViewSet(SmartViewSet): }}; """ - @action(detail=True, permission_classes=[AllowAny], authentication_classes=[SessionAuthentication]) + @action( + detail=True, + permission_classes=[AllowAny], + authentication_classes=[SessionAuthentication], + ) def validator(self, request, pk): obj = self.get_object() return HttpResponse( @@ -26,7 +31,11 @@ def validator(self, request, pk): content_type="application/javascript", ) - @action(detail=True, permission_classes=[AllowAny], authentication_classes=[SessionAuthentication]) + @action( + detail=True, + permission_classes=[AllowAny], + authentication_classes=[SessionAuthentication], + ) def script(self, request, pk): obj = self.get_object() return HttpResponse( diff --git a/src/aurora/checks.py b/src/aurora/checks.py deleted file mode 100644 index 90703087..00000000 --- a/src/aurora/checks.py +++ /dev/null @@ -1,21 +0,0 @@ -import os - -from django.core.checks import Error, register - - -@register("aurora", "env") -def check_env_setting(app_configs, **kwargs): - errors = [] - from .config import MANDATORY - - for entry, __ in MANDATORY.items(): - if entry not in os.environ: - errors.append( - Error( - f"{entry} environment variable is not set", - # hint=f"set '{entry}' environment variable", - obj="os.environ", - id="aurora.ENV", - ) - ) - return errors diff --git a/src/aurora/config/__init__.py b/src/aurora/config/__init__.py index ae10b4e0..5b6e67fe 100644 --- a/src/aurora/config/__init__.py +++ b/src/aurora/config/__init__.py @@ -2,6 +2,7 @@ from urllib.parse import urlencode, urlparse from environ import Env +from smart_env import SmartEnv from aurora.core.flags import parse_bool @@ -12,28 +13,15 @@ def parse_bookmarks(value): def parse_emails(value): admins = value.split(",") - v = [(a.split("@")[0].strip(), a.strip()) for a in admins] - return v + return [(a.split("@")[0].strip(), a.strip()) for a in admins] -MANDATORY = { - "CACHE_DEFAULT": (str, "locmemcache://"), - "CHANNEL_LAYER": (str, "locmemcache://"), - "DATABASE_URL": (str, 
"psql://postgres:@postgres:5432/aurora"), - "DJANGO_ADMIN_URL": (str, f"{uuid.uuid4().hex}/"), - "EMAIL_FROM_EMAIL": (str, ""), - "EMAIL_HOST": (str, ""), - "EMAIL_HOST_PASSWORD": (str, ""), - "EMAIL_HOST_USER": (str, ""), - "EMAIL_SUBJECT_PREFIX": (str, "[Aurora]"), - "FERNET_KEY": (str, uuid.uuid4().hex), - "MEDIA_ROOT": (str, "/tmp/media/"), - "STATIC_ROOT": (str, "/tmp/static/"), -} - OPTIONS = { "ADMINS": (parse_emails, ""), "ADMIN_SYNC_CONFIG": (str, "admin_sync.conf.DjangoConstance"), + "ADMIN_SYNC_LOCAL_ADMIN_URL": (str, ""), + "ADMIN_SYNC_REMOTE_ADMIN_URL": (str, ""), + "ADMIN_SYNC_REMOTE_SERVER": (str, ""), "ALLOWED_HOSTS": (list, ["*"]), "AUTHENTICATION_BACKENDS": (list, []), "AZURE_AUTHORITY_HOST": (str, ""), @@ -43,66 +31,78 @@ def parse_emails(value): "AZURE_POLICY_NAME": (str, ""), "AZURE_TENANT_ID": (str, ""), "AZURE_TENANT_KEY": (str, ""), - "CAPTCHA_TEST_MODE": (bool, "false"), - "TRANSLATOR_SERVICE": (str, ""), "AZURE_TRANSLATOR_KEY": (str, ""), "AZURE_TRANSLATOR_LOCATION": (str, ""), + "CACHE_DEFAULT": (str, "locmemcache://", "", True), + "CAPTCHA_TEST_MODE": (bool, "false"), + "CHANNEL_LAYER": (str, "locmemcache://", True), "CONSTANCE_DATABASE_CACHE_BACKEND": (str, ""), "CORS_ALLOWED_ORIGINS": (list, []), - "CSP_REPORT_ONLY": (bool, True), + "CSP_REPORT_ONLY": (bool, False, True), "CSRF_COOKIE_NAME": (str, "aurora"), + "CSRF_COOKIE_SECURE": (bool, True, False), + "CSRF_TRUSTED_ORIGINS": (list, [], []), + "DATABASE_URL": (str, "psql://postgres:@postgres:5432/aurora", True), "DEBUG": (bool, False), "DEBUG_PROPAGATE_EXCEPTIONS": (bool, False), - "DJANGO_ADMIN_TITLE": (str, "Aurora"), - "EMAIL_BACKEND": (str, "django.core.mail.backends.smtp.EmailBackend"), - # "EMAIL_FROM_EMAIL": (str, ""), - # "EMAIL_HOST": (str, ""), - # "EMAIL_HOST_PASSWORD": (str, ""), - # "EMAIL_HOST_USER": (str, ""), + "DEFAULT_FILE_STORAGE": (str, "django.core.files.storage.FileSystemStorage"), + "DJANGO_ADMIN_URL": (str, f"{uuid.uuid4().hex}/", True), + "EMAIL_BACKEND": (str, "anymail.backends.mailjet.EmailBackend"), + "EMAIL_FROM_EMAIL": (str, ""), + "EMAIL_HOST": (str, ""), + "EMAIL_HOST_PASSWORD": (str, ""), + "EMAIL_HOST_USER": (str, ""), "EMAIL_PORT": (int, 587), - # "EMAIL_SUBJECT_PREFIX": (str, "[Aurora]"), + "EMAIL_SUBJECT_PREFIX": (str, "[Aurora]"), "EMAIL_TIMEOUT": (int, 30), "EMAIL_USE_LOCALTIME": (bool, False), "EMAIL_USE_SSL": (bool, False), "EMAIL_USE_TLS": (bool, True), - "FRONT_DOOR_ENABLED": (bool, False), + "FERNET_KEY": (str, "", uuid.uuid4().hex, True), "FRONT_DOOR_ALLOWED_PATHS": (str, ".*"), - "FRONT_DOOR_TOKEN": (str, uuid.uuid4()), + "FRONT_DOOR_ENABLED": (bool, False), "FRONT_DOOR_LOG_LEVEL": (str, "ERROR"), + "FRONT_DOOR_TOKEN": (str, uuid.uuid4()), "GRAPH_API_ENABLED": (bool, False), - # "FERNET_KEY": (str, "2jQklRvSAZUdsVOKH-521Wbf_p5t2nTDA0LgD9sgim4="), - "INTERNAL_IPS": (list, ["127.0.0.1", "localhost"]), + "INTERNAL_IPS": (list, [], ["127.0.0.1", "localhost"]), + "JWT_LEEWAY": (int, 0), "LANGUAGE_CODE": (str, "en-us"), "LOG_LEVEL": (str, "ERROR"), + "MAILJET_API_KEY": (str, ""), + "MAILJET_SECRET_KEY": (str, ""), + "MATOMO_ID": (str, "", "", False), + "MATOMO_SITE": (str, "https://unisitetracker.unicef.io/"), + "MEDIA_ROOT": (str, "/tmp/media/"), # noqa "MIGRATION_LOCK_KEY": (str, "django-migrations"), "PRODUCTION_SERVER": (str, ""), "PRODUCTION_TOKEN": (str, ""), "REDIS_CONNSTR": (str, ""), - "ROOT_KEY": (str, uuid.uuid4().hex), - "ROOT_TOKEN": (str, uuid.uuid4().hex), - "SECRET_KEY": (str, ""), + "ROOT_KEY": (str, ""), + "ROOT_TOKEN": (str, ""), + "SECRET_KEY": 
(str, "", "", True), "SENTRY_DSN": (str, ""), + "SENTRY_ENVIRONMENT": (str, ""), "SENTRY_PROJECT": (str, ""), "SENTRY_SECURITY_TOKEN": (str, ""), "SENTRY_SECURITY_TOKEN_HEADER": (str, "X-Sentry-Token"), - "SESSION_COOKIE_DOMAIN": (str, "localhost"), + "SESSION_COOKIE_DOMAIN": (str, "", "", True), "SESSION_COOKIE_NAME": (str, "aurora_id"), - "SESSION_COOKIE_SECURE": (bool, "false"), + "SESSION_COOKIE_SECURE": (bool, True, False, True), + "SITE_ID": (int, 1), "SMART_ADMIN_BOOKMARKS": (parse_bookmarks, ""), - "STATICFILES_STORAGE": (str, "aurora.web.storage.ForgivingManifestStaticFilesStorage"), + "STATICFILES_STORAGE": ( + str, + "aurora.web.storage.ForgivingManifestStaticFilesStorage", + ), + "STATIC_ROOT": (str, "/tmp/static/"), # noqa + "STATIC_URL": (str, "static/"), + "TRANSLATOR_SERVICE": (str, ""), "USE_HTTPS": (bool, False), "USE_X_FORWARDED_HOST": (bool, "false"), - "SITE_ID": (int, 1), - # "CSP_DEFAULT_SRC": (list, ), - # "CSP_SCRIPT_SRC": (str, None), - # "FERNET_KEY": (str, "Nl_puP2z0-OKVNKMtPXx4jEI-ox7sKLM7CgnGT-yAug="), - # "STATICFILES_STORAGE": (str, "django.contrib.staticfiles.storage.StaticFilesStorage"), - # "STATIC_ROOT": (str, "/tmp/static/"), - # Sentry - see CONTRIBUTING.md } -class SmartEnv(Env): +class SmartEnv2(SmartEnv): def cache_url(self, var=Env.DEFAULT_CACHE_ENV, default=Env.NOTSET, backend=None): v = self.str(var, default) if v.startswith("redisraw://"): @@ -122,4 +122,4 @@ def cache_url(self, var=Env.DEFAULT_CACHE_ENV, default=Env.NOTSET, backend=None) return super().cache_url(var, default, backend) -env = SmartEnv(**MANDATORY, **OPTIONS) +env = SmartEnv2(**OPTIONS) diff --git a/src/aurora/config/asgi.py b/src/aurora/config/asgi.py index acd6ab7e..208f8def 100644 --- a/src/aurora/config/asgi.py +++ b/src/aurora/config/asgi.py @@ -1,20 +1,13 @@ import os -from channels.auth import AuthMiddlewareStack -from channels.routing import ProtocolTypeRouter, URLRouter -from channels.security.websocket import AllowedHostsOriginValidator from django.core.asgi import get_asgi_application -import aurora.core.channels +from channels.routing import ProtocolTypeRouter os.environ.setdefault("DJANGO_SETTINGS_MODULE", "aurora.config.settings") application = ProtocolTypeRouter( { "http": get_asgi_application(), - # Just HTTP for now. (We can add other protocols later.) 
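For context on the config/__init__.py hunk above: the MANDATORY dict is gone and env is now built as SmartEnv2(**OPTIONS). A hedged usage sketch of how a settings module can consume it follows; the CACHES wiring is only an illustration and not necessarily how aurora.config.settings does it (the option names are taken from OPTIONS above):

    from aurora.config import env

    DEBUG = env("DEBUG")  # cast and default come from the OPTIONS entry for DEBUG
    CACHES = {
        # cache_url() is overridden in SmartEnv2, so redisraw:// values are also accepted
        "default": env.cache_url("CACHE_DEFAULT"),
    }
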
- "websocket": AllowedHostsOriginValidator( - AuthMiddlewareStack(URLRouter(aurora.core.channels.websocket_urlpatterns)) - ), } ) diff --git a/src/aurora/config/fragments/__init__.py b/src/aurora/config/fragments/__init__.py new file mode 100644 index 00000000..8a114060 --- /dev/null +++ b/src/aurora/config/fragments/__init__.py @@ -0,0 +1,26 @@ +from .ad import * # noqa +from .admin_sync import * # noqa +from .anymail import * # noqa +from .azure_graph_api import * # noqa +from .capcha import * # noqa +from .channels import * # noqa +from .concurrency import * # noqa +from .constance import * # noqa +from .cors import * # noqa +from .csp import * # noqa +from .dbtemplate import * # noqa +from .django_toolbar import * # noqa +from .flags import * # noqa +from .front_door import * # noqa +from .hijack import * # noqa +from .json_editor import * # noqa +from .matomo import * # noqa +from .mdeditor import * # noqa +from .rest_framework import * # noqa +from .reversion import * # noqa +from .sentry import * # noqa +from .smart_admin import * # noqa +from .social_auth import * # noqa +from .strategy import * # noqa +from .sysinfo import * # noqa +from .translator import * # noqa diff --git a/src/aurora/config/fragments/ad.py b/src/aurora/config/fragments/ad.py new file mode 100644 index 00000000..17588069 --- /dev/null +++ b/src/aurora/config/fragments/ad.py @@ -0,0 +1,4 @@ +from .. import env + +AZURE_CLIENT_ID = env("AZURE_CLIENT_ID") +AZURE_CLIENT_SECRET = env("AZURE_CLIENT_SECRET") diff --git a/src/aurora/config/fragments/admin_sync.py b/src/aurora/config/fragments/admin_sync.py new file mode 100644 index 00000000..b94e517e --- /dev/null +++ b/src/aurora/config/fragments/admin_sync.py @@ -0,0 +1,8 @@ +from .. import env + +ADMIN_SYNC_CONFIG = env("ADMIN_SYNC_CONFIG") +ADMIN_SYNC_RESPONSE_HEADER = None +# these are actually used only in local development +ADMIN_SYNC_REMOTE_SERVER = env("ADMIN_SYNC_REMOTE_SERVER") +ADMIN_SYNC_REMOTE_ADMIN_URL = env("ADMIN_SYNC_REMOTE_ADMIN_URL") +ADMIN_SYNC_LOCAL_ADMIN_URL = env("ADMIN_SYNC_LOCAL_ADMIN_URL") diff --git a/src/aurora/config/fragments/anymail.py b/src/aurora/config/fragments/anymail.py new file mode 100644 index 00000000..ab849587 --- /dev/null +++ b/src/aurora/config/fragments/anymail.py @@ -0,0 +1,6 @@ +from .. import env + +ANYMAIL = { + "MAILJET_API_KEY": env("MAILJET_API_KEY"), + "MAILJET_SECRET_KEY": env("MAILJET_SECRET_KEY"), +} diff --git a/src/aurora/config/fragments/azure_graph_api.py b/src/aurora/config/fragments/azure_graph_api.py new file mode 100644 index 00000000..dc1eea40 --- /dev/null +++ b/src/aurora/config/fragments/azure_graph_api.py @@ -0,0 +1,4 @@ +# Graph API +AZURE_GRAPH_API_BASE_URL = "https://graph.microsoft.com" +AZURE_GRAPH_API_VERSION = "v1.0" +AZURE_TOKEN_URL = "https://login.microsoftonline.com/unicef.org/oauth2/token" # noqa diff --git a/src/aurora/config/fragments/capcha.py b/src/aurora/config/fragments/capcha.py new file mode 100644 index 00000000..c8178e49 --- /dev/null +++ b/src/aurora/config/fragments/capcha.py @@ -0,0 +1,6 @@ +from .. import env + +CAPTCHA_FONT_SIZE = 40 +CAPTCHA_CHALLENGE_FUNCT = "captcha.helpers.random_char_challenge" +CAPTCHA_TEST_MODE = env("CAPTCHA_TEST_MODE") +CAPTCHA_GET_FROM_POOL = True diff --git a/src/aurora/config/fragments/channels.py b/src/aurora/config/fragments/channels.py new file mode 100644 index 00000000..a40a369f --- /dev/null +++ b/src/aurora/config/fragments/channels.py @@ -0,0 +1,10 @@ +from .. 
import env + +CHANNEL_LAYERS = { + "default": { + "BACKEND": "channels_redis.core.RedisChannelLayer", + "CONFIG": { + "hosts": [env("CHANNEL_LAYER")], + }, + }, +} diff --git a/src/aurora/config/fragments/concurrency.py b/src/aurora/config/fragments/concurrency.py new file mode 100644 index 00000000..c3713e6c --- /dev/null +++ b/src/aurora/config/fragments/concurrency.py @@ -0,0 +1 @@ +CONCURRENCY_ENABLED = False diff --git a/src/aurora/config/fragments/constance.py b/src/aurora/config/fragments/constance.py new file mode 100644 index 00000000..31a51312 --- /dev/null +++ b/src/aurora/config/fragments/constance.py @@ -0,0 +1,47 @@ +from collections import OrderedDict + +from .. import env + +CONSTANCE_ADDITIONAL_FIELDS = { + "html_minify_select": [ + "bitfield.forms.BitFormField", + { + "initial": 0, + "required": False, + "choices": (("html", "HTML"), ("line", "NEWLINE"), ("space", "SPACES")), + }, + ], +} +CONSTANCE_BACKEND = "constance.backends.database.DatabaseBackend" +CONSTANCE_DATABASE_CACHE_BACKEND = env("CONSTANCE_DATABASE_CACHE_BACKEND") +CONSTANCE_CONFIG = OrderedDict( + { + "CACHE_FORMS": (False, "", bool), + "CACHE_VERSION": (1, "", int), + "HOME_PAGE_REGISTRATIONS": ("", "", str), + "SMART_ADMIN_BOOKMARKS": ( + "", + "", + str, + ), + "LOGIN_LOCAL": (True, "Enable local accounts login", bool), + "LOGIN_SSO": (True, "Enable SSO logon", bool), + "ADMIN_SYNC_REMOTE_SERVER": ("", "production server url", str), + "ADMIN_SYNC_REMOTE_ADMIN_URL": ("/admin/", "", str), + "ADMIN_SYNC_LOCAL_ADMIN_URL": ("/admin/", "", str), + "LOG_POST_ERRORS": (False, "", bool), + "MINIFY_RESPONSE": (0, "select yes or no", "html_minify_select"), + "MINIFY_IGNORE_PATH": (r"", "regex for ignored path", str), + "BASE_TEMPLATE": ("base_lean.html", "Default base template", str), + "HOME_TEMPLATE": ("home.html", "Default home.html", str), + "QRCODE": (True, "Enable QRCode generation", bool), + "SHOW_REGISTER_ANOTHER": (True, "Enable QRCode generation", bool), + "MAINTENANCE_MODE": (False, "set maintenance mode On/Off", bool), + "WAF_REGISTRATION_ALLOWED_HOSTNAMES": ( + ".*", + "public website hostname (regex)", + str, + ), + "WAF_ADMIN_ALLOWED_HOSTNAMES": ("", "admin website hostname (regex)", str), + } +) diff --git a/src/aurora/config/fragments/cors.py b/src/aurora/config/fragments/cors.py new file mode 100644 index 00000000..eb992d2c --- /dev/null +++ b/src/aurora/config/fragments/cors.py @@ -0,0 +1,12 @@ +from .. 
import env + +CORS_ALLOWED_ORIGINS = [ + "https://excubo.unicef.io", + "http://localhost:8000", + "https://browser.sentry-cdn.com", + "https://cdnjs.cloudflare.com", + "https://login.microsoftonline.com", +] + env("CORS_ALLOWED_ORIGINS") + +CORS_ORIGIN_ALLOW_ALL = True +CORS_ALLOW_CREDENTIALS = True diff --git a/src/aurora/config/fragments/csp.py b/src/aurora/config/fragments/csp.py new file mode 100644 index 00000000..ddd0624d --- /dev/null +++ b/src/aurora/config/fragments/csp.py @@ -0,0 +1,21 @@ +SOURCES = ( + "'self'", + "inline", + "unsafe-inline", + "data:", + "blob:", + "'unsafe-inline'", + "localhost:8000", + "unpkg.com", + "browser.sentry-cdn.com", + "cdnjs.cloudflare.com", + "unisitetracker.unicef.io", + "cdn.jsdelivr.net", + "register.unicef.org", + "uni-hope-ukr-sr.azurefd.net", + "uni-hope-ukr-sr-dev.azurefd.net", + "uni-hope-ukr-sr-dev.unitst.org", +) + +CSP_DEFAULT_SRC = SOURCES +CSP_FRAME_ANCESTORS = ("'self'",) diff --git a/src/aurora/config/fragments/dbtemplate.py b/src/aurora/config/fragments/dbtemplate.py new file mode 100644 index 00000000..7954a380 --- /dev/null +++ b/src/aurora/config/fragments/dbtemplate.py @@ -0,0 +1,2 @@ +DBTEMPLATES_USE_REVERSION = True +DBTEMPLATES_USE_CODEMIRROR = True diff --git a/src/aurora/config/fragments/django_toolbar.py b/src/aurora/config/fragments/django_toolbar.py new file mode 100644 index 00000000..95ffac9d --- /dev/null +++ b/src/aurora/config/fragments/django_toolbar.py @@ -0,0 +1,36 @@ +from django_regex.utils import RegexList + + +def show_ddt(request): # pragma: no-cover + if request.path in RegexList(("/tpl/.*", "/api/.*", "/dal/.*")): # pragma: no cache + return False + return None + + +DEBUG_TOOLBAR_CONFIG = { + "SHOW_TOOLBAR_CALLBACK": show_ddt, + "JQUERY_URL": "", + "INSERT_BEFORE": "", + "SHOW_TEMPLATE_CONTEXT": True, +} + +DEBUG_TOOLBAR_PANELS = [ + "debug_toolbar.panels.history.HistoryPanel", + # "debug_toolbar.panels.versions.VersionsPanel", + "aurora.ddt_panels.StatePanel", + "aurora.ddt_panels.MigrationPanel", + "debug_toolbar.panels.timer.TimerPanel", + "flags.panels.FlagsPanel", + "flags.panels.FlagChecksPanel", + "debug_toolbar.panels.settings.SettingsPanel", + "debug_toolbar.panels.headers.HeadersPanel", + "debug_toolbar.panels.request.RequestPanel", + "debug_toolbar.panels.sql.SQLPanel", + "debug_toolbar.panels.staticfiles.StaticFilesPanel", + "debug_toolbar.panels.templates.TemplatesPanel", + "debug_toolbar.panels.cache.CachePanel", + "debug_toolbar.panels.signals.SignalsPanel", + "debug_toolbar.panels.logging.LoggingPanel", + "debug_toolbar.panels.redirects.RedirectsPanel", + "debug_toolbar.panels.profiling.ProfilingPanel", +] diff --git a/src/aurora/config/fragments/flags.py b/src/aurora/config/fragments/flags.py new file mode 100644 index 00000000..42e9dc5c --- /dev/null +++ b/src/aurora/config/fragments/flags.py @@ -0,0 +1,10 @@ +from ..settings import DEBUG + +FLAGS_STATE_LOGGING = DEBUG + +FLAGS = { + "DEVELOP_DEVELOPER": [], + "DEVELOP_DEBUG_TOOLBAR": [], + "SENTRY_JAVASCRIPT": [], + "I18N_COLLECT_MESSAGES": [], +} diff --git a/src/aurora/config/fragments/front_door.py b/src/aurora/config/fragments/front_door.py new file mode 100644 index 00000000..caeb7abd --- /dev/null +++ b/src/aurora/config/fragments/front_door.py @@ -0,0 +1,16 @@ +from .. 
import env + +FRONT_DOOR_CONFIG = "front_door.conf.DjangoConstance" +FRONT_DOOR_ENABLED = env("FRONT_DOOR_ENABLED") +FRONT_DOOR_ALLOWED_PATHS = env("FRONT_DOOR_ALLOWED_PATHS") +FRONT_DOOR_TOKEN = env("FRONT_DOOR_TOKEN") +FRONT_DOOR_HEADER = "x-aurora" +FRONT_DOOR_COOKIE_NAME = "x-aurora" +FRONT_DOOR_COOKIE_PATTERN = ".*" +FRONT_DOOR_LOG_LEVEL = env("FRONT_DOOR_LOG_LEVEL") # LOG_RULE_FAIL +FRONT_DOOR_RULES = [ + "front_door.rules.allowed_path", # grant access to ALLOWED_PATHS + "front_door.rules.allowed_ip", # grant access to ALLOWED_IPS + "front_door.rules.special_header", # grant access if request has Header[HEADER] == TOKEN + "front_door.rules.cookie_value", # grant access if request.COOKIES[COOKIE_NAME] +] diff --git a/src/aurora/config/fragments/hijack.py b/src/aurora/config/fragments/hijack.py new file mode 100644 index 00000000..05fb48ba --- /dev/null +++ b/src/aurora/config/fragments/hijack.py @@ -0,0 +1 @@ +HIJACK_PERMISSION_CHECK = "aurora.administration.hijack.can_impersonate" diff --git a/src/aurora/config/fragments/json_editor.py b/src/aurora/config/fragments/json_editor.py new file mode 100644 index 00000000..68785470 --- /dev/null +++ b/src/aurora/config/fragments/json_editor.py @@ -0,0 +1,4 @@ +JSON_EDITOR_JS = "https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/8.6.4/jsoneditor.js" +JSON_EDITOR_CSS = "https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/8.6.4/jsoneditor.css" +JSON_EDITOR_INIT_JS = "django-jsoneditor/jsoneditor-init.min.js" +JSON_EDITOR_ACE_OPTIONS_JS = "django-jsoneditor/ace_options.min.js" diff --git a/src/aurora/config/fragments/matomo.py b/src/aurora/config/fragments/matomo.py new file mode 100644 index 00000000..0746019a --- /dev/null +++ b/src/aurora/config/fragments/matomo.py @@ -0,0 +1,4 @@ +from .. import env + +MATOMO_SITE = env("MATOMO_SITE") +MATOMO_ID = env("MATOMO_ID") diff --git a/src/aurora/config/fragments/mdeditor.py b/src/aurora/config/fragments/mdeditor.py new file mode 100644 index 00000000..f04d4f24 --- /dev/null +++ b/src/aurora/config/fragments/mdeditor.py @@ -0,0 +1,61 @@ +MDEDITOR_CONFIGS = { + "default": { + "width": "100% ", # Custom edit box width + "height": 200, # Custom edit box height + "toolbar": [ + "undo", + "redo", + "|", + "bold", + "del", + "italic", + "quote", + "ucwords", + "uppercase", + "lowercase", + "|", + "h1", + "h2", + "h3", + "h5", + "h6", + "|", + "list-ul", + "list-ol", + "hr", + "|", + "link", + "reference-link", + "image", + "code", + "preformatted-text", + "code-block", + "table", + "datetime", + "emoji", + "html-entities", + "pagebreak", + "goto-line", + "|", + "help", + "info", + "||", + "preview", + "watch", + "fullscreen", + ], # custom edit box toolbar + "theme": "default", # edit box theme, dark / default + "preview_theme": "default", # Preview area theme, dark / default + "editor_theme": "default", # edit area theme, pastel-on-dark / default + "toolbar_autofixed": False, # Whether the toolbar capitals + "search_replace": True, # Whether to open the search for replacement + "emoji": True, # whether to open the expression function + "tex": True, # whether to open the tex chart function + "flow_chart": True, # whether to open the flow chart function + "sequence": True, # Whether to open the sequence diagram function + "watch": True, # Live preview + "lineWrapping": True, # lineWrapping + "lineNumbers": True, # lineNumbers + "language": "en", # zh / en / es + } +} diff --git a/src/aurora/config/fragments/rest_framework.py b/src/aurora/config/fragments/rest_framework.py new file mode 100644 index 
00000000..2480396f --- /dev/null +++ b/src/aurora/config/fragments/rest_framework.py @@ -0,0 +1,19 @@ +REST_FRAMEWORK = { + "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination", + "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.NamespaceVersioning", + "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",), + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.BrowsableAPIRenderer", + "rest_framework_datatables.renderers.DatatablesRenderer", + ), + "PAGE_SIZE": 30, + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework.authentication.BasicAuthentication", + "rest_framework.authentication.SessionAuthentication", + "rest_framework.authentication.TokenAuthentication", + ], + "DEFAULT_PERMISSION_CLASSES": [ + "rest_framework.permissions.DjangoModelPermissions", + ], +} diff --git a/src/aurora/config/fragments/reversion.py b/src/aurora/config/fragments/reversion.py new file mode 100644 index 00000000..c51220de --- /dev/null +++ b/src/aurora/config/fragments/reversion.py @@ -0,0 +1,5 @@ +# Add reversion models to admin interface: +ADD_REVERSION_ADMIN = True +# optional settings: +REVERSION_COMPARE_FOREIGN_OBJECTS_AS_ID = False +REVERSION_COMPARE_IGNORE_NOT_REGISTERED = False diff --git a/src/aurora/config/fragments/sentry.py b/src/aurora/config/fragments/sentry.py new file mode 100644 index 00000000..e4aef4e3 --- /dev/null +++ b/src/aurora/config/fragments/sentry.py @@ -0,0 +1,27 @@ +import logging + +import aurora + +from .. import env + +SENTRY_DSN = env("SENTRY_DSN") +SENTRY_PROJECT = env("SENTRY_PROJECT") +if SENTRY_DSN: + import sentry_sdk + from sentry_sdk.integrations.django import DjangoIntegration + from sentry_sdk.integrations.logging import LoggingIntegration + + sentry_logging = LoggingIntegration( + level=logging.INFO, # Capture info and above as breadcrumbs + event_level=logging.ERROR, # Send errors as events + ) + sentry_sdk.init( + dsn=SENTRY_DSN, + environment=env("SENTRY_ENVIRONMENT"), + integrations=[ + DjangoIntegration(transaction_style="url"), + sentry_logging, + ], + release=aurora.VERSION, + send_default_pii=True, + ) diff --git a/src/aurora/config/fragments/smart_admin.py b/src/aurora/config/fragments/smart_admin.py new file mode 100644 index 00000000..baedef8e --- /dev/null +++ b/src/aurora/config/fragments/smart_admin.py @@ -0,0 +1,17 @@ +SMART_ADMIN_SECTIONS = { + "Registration": ["registration", "dbtemplates", "flatpages"], + "Security": ["social_auth", "security"], + "Form Builder": ["core"], + "Organization": ["core.Organization", "core.Project"], + "Configuration": ["constance", "flags"], + "i18N": [ + "i18n", + ], + "Other": [], + "_hidden_": [], +} +SMART_ADMIN_TITLE = "=" +SMART_ADMIN_HEADER = "Aurora" +SMART_ADMIN_BOOKMARKS = "aurora.core.utils.get_bookmarks" + +SMART_ADMIN_PROFILE_LINK = True diff --git a/src/aurora/config/fragments/social_auth.py b/src/aurora/config/fragments/social_auth.py new file mode 100644 index 00000000..cda8bdf3 --- /dev/null +++ b/src/aurora/config/fragments/social_auth.py @@ -0,0 +1,41 @@ +from .. 
import env + +SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_SECRET = env.str("AZURE_CLIENT_SECRET") +SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_TENANT_ID = env("AZURE_TENANT_ID") +SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_KEY = env.str("AZURE_CLIENT_KEY") +SOCIAL_AUTH_RESOURCE = "https://graph.microsoft.com/" +SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = [ + "username", + "first_name", + "last_name", + "email", +] + +SOCIAL_AUTH_JSONFIELD_ENABLED = True +SOCIAL_AUTH_PIPELINE = ( + "aurora.core.authentication.social_details", + "social_core.pipeline.social_auth.social_uid", + "social_core.pipeline.social_auth.auth_allowed", + "social_core.pipeline.social_auth.social_user", + "social_core.pipeline.user.get_username", + "aurora.core.authentication.require_email", + "social_core.pipeline.social_auth.associate_by_email", + "aurora.core.authentication.create_user", + "social_core.pipeline.social_auth.associate_user", + "social_core.pipeline.social_auth.load_extra_data", + "aurora.core.authentication.user_details", + "aurora.core.authentication.redir_to_form", +) +SOCIAL_AUTH_AZUREAD_B2C_OAUTH2_USER_FIELDS = [ + "email", + "fullname", +] + +SOCIAL_AUTH_AZUREAD_B2C_OAUTH2_SCOPE = [ + "openid", + "email", + "profile", +] + +SOCIAL_AUTH_SANITIZE_REDIRECTS = True +SOCIAL_AUTH_JWT_LEEWAY = env("JWT_LEEWAY") diff --git a/src/aurora/config/fragments/strategy.py b/src/aurora/config/fragments/strategy.py new file mode 100644 index 00000000..fa68e7f9 --- /dev/null +++ b/src/aurora/config/fragments/strategy.py @@ -0,0 +1 @@ +STRATEGY_CLASSLOADER = "aurora.core.registry.classloader" diff --git a/src/aurora/config/fragments/sysinfo.py b/src/aurora/config/fragments/sysinfo.py new file mode 100644 index 00000000..590e04e7 --- /dev/null +++ b/src/aurora/config/fragments/sysinfo.py @@ -0,0 +1,18 @@ +def masker(key, value, config, request): + from django_sysinfo.utils import cleanse_setting + + from aurora.core.utils import is_root + + if is_root(request): + return value + return cleanse_setting(key, value, config, request) + + +SYSINFO = { + "host": True, + "os": True, + "python": True, + "modules": True, + "masker": "aurora.config.settings.masker", + "masked_environment": "API|TOKEN|KEY|SECRET|PASS|SIGNATURE|AUTH|_ID|SID|DATABASE_URL", +} diff --git a/src/aurora/config/fragments/translator.py b/src/aurora/config/fragments/translator.py new file mode 100644 index 00000000..94b974e4 --- /dev/null +++ b/src/aurora/config/fragments/translator.py @@ -0,0 +1,5 @@ +from .. import env + +TRANSLATOR_SERVICE = env("TRANSLATOR_SERVICE") +AZURE_TRANSLATOR_KEY = env("AZURE_TRANSLATOR_KEY") +AZURE_TRANSLATOR_LOCATION = env("AZURE_TRANSLATOR_LOCATION") diff --git a/src/aurora/config/settings.py b/src/aurora/config/settings.py index 5e2dc746..ebcf1d6d 100644 --- a/src/aurora/config/settings.py +++ b/src/aurora/config/settings.py @@ -1,13 +1,8 @@ import logging import mimetypes import os -from collections import OrderedDict from pathlib import Path -from django_regex.utils import RegexList - -import aurora - from . 
import env BASE_DIR = os.path.abspath(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) @@ -36,6 +31,7 @@ SITE_ID = env("SITE_ID") INSTALLED_APPS = [ "daphne", + "smart_env", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", @@ -50,10 +46,9 @@ "reversion_compare", # https://github.com/jedie/django-reversion-compare "django_filters", # --- - # "aurora.admin.apps.AuroraAdminUIConfig", "smart_admin.apps.SmartLogsConfig", "smart_admin.apps.SmartTemplateConfig", - # "smart_admin.apps.SmartAuthConfig", + "smart_admin.apps.SmartAuthConfig", "smart_admin.apps.SmartConfig", "aurora.administration.apps.AuroraAdminConfig", "front_door.contrib", @@ -79,6 +74,7 @@ "simplemathcaptcha", "dbtemplates", "admin_sync", + "anymail", # --- "aurora.apps.Config", "aurora.flatpages.apps.Config", @@ -106,18 +102,25 @@ "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", - "aurora.web.middlewares.admin.AdminSiteMiddleware", + # "aurora.web.middlewares.admin.AdminSiteMiddleware", # "aurora.web.middlewares.http2.HTTP2Middleware", "aurora.web.middlewares.minify.HtmlMinMiddleware", "django.middleware.gzip.GZipMiddleware", # "django.middleware.cache.FetchFromCacheMiddleware", "debug_toolbar.middleware.DebugToolbarMiddleware", "hijack.middleware.HijackUserMiddleware", + "csp.middleware.CSPMiddleware", ] X_FRAME_OPTIONS = "SAMEORIGIN" ROOT_URLCONF = "aurora.config.urls" +TEMPLATE_LOADERS = ( + "dbtemplates.loader.Loader", + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", +) + TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", @@ -132,11 +135,7 @@ ], "APP_DIRS": False, "OPTIONS": { - "loaders": [ - "dbtemplates.loader.Loader", - "django.template.loaders.filesystem.Loader", - "django.template.loaders.app_directories.Loader", - ], + "loaders": TEMPLATE_LOADERS, # 'builtins': [ # 'http2.templatetags', # ], @@ -240,13 +239,13 @@ LOCALE_PATHS = (str(PACKAGE_DIR / "LOCALE"),) SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 # 7 days -# SESSION_COOKIE_DOMAIN = env('SESSION_COOKIE_DOMAIN') SESSION_COOKIE_SAMESITE = "Lax" SESSION_COOKIE_SECURE = False SESSION_COOKIE_NAME = env("SESSION_COOKIE_NAME") SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies" SESSION_COOKIE_HTTPONLY = True SESSION_EXPIRE_AT_BROWSER_CLOSE = True +SESSION_COOKIE_HTTPONLY = False # for offline forms TIME_ZONE = "UTC" @@ -256,18 +255,17 @@ USE_TZ = True -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.1/howto/static-files/ -# STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") -# Ensure STATIC_ROOT exists. 
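Regarding the TEMPLATE_LOADERS tuple introduced above: dbtemplates.loader.Loader is listed before the filesystem and app-directories loaders, so a template stored in the database shadows a file of the same name. As an illustration only (the template name below is made up, not taken from the project):

    from dbtemplates.models import Template

    # a DB-stored template with this name now wins over the filesystem copy
    Template.objects.update_or_create(
        name="registration/thanks.html",
        defaults={"content": "<h1>Thank you</h1>"},
    )
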
-# os.makedirs(STATIC_ROOT, exist_ok=True) - -# STATIC_URL = f"/static/{os.environ.get('VERSION', '')}/" STATIC_URL = env("STATIC_URL") STATIC_ROOT = env("STATIC_ROOT") + STATIC_URL # simplify nginx config -# STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage" -# STATICFILES_STORAGE = "django.contrib.staticfiles.storage.ManifestStaticFilesStorage" -STATICFILES_STORAGE = env("STATICFILES_STORAGE") + +STORAGES = { + "default": { + "BACKEND": env("DEFAULT_FILE_STORAGE"), + }, + "staticfiles": { + "BACKEND": env("STATICFILES_STORAGE"), + }, +} STATICFILES_DIRS = [ os.path.join(BASE_DIR, "web/static"), @@ -286,7 +284,9 @@ CSRF_COOKIE_NAME = env("CSRF_COOKIE_NAME") CSRF_HEADER_NAME = "HTTP_X_CSRFTOKEN" -CSRF_COOKIE_SECURE = False +CSRF_TRUSTED_ORIGINS = env("CSRF_TRUSTED_ORIGINS") +CSRF_COOKIE_SECURE = env("CSRF_COOKIE_SECURE") + SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") USE_X_FORWARDED_HOST = env("USE_X_FORWARDED_HOST") @@ -300,16 +300,12 @@ EMAIL_USE_SSL = env("EMAIL_USE_SSL") EMAIL_USE_TLS = env("EMAIL_USE_TLS") -# FORM_RENDERER = 'django.forms.renderers.TemplatesSetting' LOGIN_REDIRECT_URL = "index" LOGOUT_REDIRECT_URL = "index" -# LOGIN_URL = "/login" -# USER_LOGIN_URL = "/login" LOGGING = { "version": 1, "disable_existing_loggers": False, - # "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}}, "formatters": {"verbose": {"format": "%(levelname)s %(asctime)s %(module)s: %(message)s"}}, "handlers": { "console": {"class": "logging.StreamHandler", "formatter": "verbose"}, @@ -367,93 +363,7 @@ ] MAX_OBSERVED = 1 -SENTRY_DSN = env("SENTRY_DSN") -SENTRY_PROJECT = env("SENTRY_PROJECT") -if SENTRY_DSN: - import sentry_sdk - from sentry_sdk.integrations.django import DjangoIntegration - from sentry_sdk.integrations.logging import LoggingIntegration - - sentry_logging = LoggingIntegration( - level=logging.INFO, # Capture info and above as breadcrumbs - event_level=logging.ERROR, # Send errors as events - ) - sentry_sdk.init( - dsn=SENTRY_DSN, - environment=env("SENTRY_ENVIRONMENT", default=None), - integrations=[ - DjangoIntegration(transaction_style="url"), - sentry_logging, - ], - release=aurora.VERSION, - send_default_pii=True, - ) -CORS_ALLOWED_ORIGINS = [ - "https://excubo.unicef.io", - "http://localhost:8000", - "https://browser.sentry-cdn.com", - "https://cdnjs.cloudflare.com", - "https://login.microsoftonline.com", -] + env("CORS_ALLOWED_ORIGINS") - -CONSTANCE_ADDITIONAL_FIELDS = { - "html_minify_select": [ - "bitfield.forms.BitFormField", - {"initial": 0, "required": False, "choices": (("html", "HTML"), ("line", "NEWLINE"), ("space", "SPACES"))}, - ], -} -CONSTANCE_BACKEND = "constance.backends.database.DatabaseBackend" -CONSTANCE_DATABASE_CACHE_BACKEND = env("CONSTANCE_DATABASE_CACHE_BACKEND") -CONSTANCE_CONFIG = OrderedDict( - { - "CACHE_FORMS": (False, "", bool), - "CACHE_VERSION": (1, "", int), - "HOME_PAGE_REGISTRATIONS": ("", "", str), - "SMART_ADMIN_BOOKMARKS": ( - "", - "", - str, - ), - "LOGIN_LOCAL": (True, "Enable local accounts login", bool), - "LOGIN_SSO": (True, "Enable SSO logon", bool), - "ADMIN_SYNC_REMOTE_SERVER": ("", "production server url", str), - "ADMIN_SYNC_REMOTE_ADMIN_URL": ("/admin/", "", str), - "ADMIN_SYNC_LOCAL_ADMIN_URL": ("/admin/", "", str), - "ADMIN_SYNC_USE_REVERSION": (False, "", bool), - "LOG_POST_ERRORS": (False, "", bool), - "GRAPH_API_ENABLED": (False, "Graph API is enabled ", bool), - "MINIFY_RESPONSE": (0, "select yes or no", "html_minify_select"), - "MINIFY_IGNORE_PATH": 
(r"", "regex for ignored path", str), - "BASE_TEMPLATE": ("base_lean.html", "Default base template", str), - "HOME_TEMPLATE": ("home.html", "Default home.html", str), - "QRCODE": (True, "Enable QRCode generation", bool), - "SHOW_REGISTER_ANOTHER": (True, "Enable QRCode generation", bool), - "MAINTENANCE_MODE": (False, "set maintenance mode On/Off", bool), - "WAF_REGISTRATION_ALLOWED_HOSTNAMES": (".*", "public website hostname (regex)", str), - "WAF_ADMIN_ALLOWED_HOSTNAMES": ("", "admin website hostname (regex)", str), - } -) -SMART_ADMIN_SECTIONS = { - "Registration": ["registration", "dbtemplates", "flatpages"], - "Security": ["social_auth", "security"], - "Form Builder": ["core"], - "Organization": ["core.Organization", "core.Project"], - "Configuration": ["constance", "flags"], - "i18N": [ - "i18n", - ], - "Other": [], - "_hidden_": [], -} -SMART_ADMIN_TITLE = "=" -SMART_ADMIN_HEADER = env("DJANGO_ADMIN_TITLE") -SMART_ADMIN_BOOKMARKS = "aurora.core.utils.get_bookmarks" - -SMART_ADMIN_PROFILE_LINK = True - -CORS_ORIGIN_ALLOW_ALL = True -CORS_ALLOW_CREDENTIALS = True RATELIMIT = { "PERIODS": { @@ -469,153 +379,14 @@ AA_PERMISSION_HANDLER = 3 # AA_PERMISSION_CREATE_USE_APPCONFIG - -def masker(key, value, config, request): - from django_sysinfo.utils import cleanse_setting - - from aurora.core.utils import is_root - - if is_root(request): - return value - return cleanse_setting(key, value, config, request) - - -SYSINFO = { - "host": True, - "os": True, - "python": True, - "modules": True, - "masker": "aurora.config.settings.masker", - "masked_environment": "API|TOKEN|KEY|SECRET|PASS|SIGNATURE|AUTH|_ID|SID|DATABASE_URL", - # "project": { - # "mail": False, - # "installed_apps": False, - # "databases": False, - # "MEDIA_ROOT": False, - # "STATIC_ROOT": False, - # "CACHES": False - # }, - # "checks": None, -} - -FLAGS_STATE_LOGGING = DEBUG - -FLAGS = { - "DEVELOP_DEVELOPER": [], - "DEVELOP_DEBUG_TOOLBAR": [], - "SENTRY_JAVASCRIPT": [], - "I18N_COLLECT_MESSAGES": [], -} - -JSON_EDITOR_JS = "https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/8.6.4/jsoneditor.js" -JSON_EDITOR_CSS = "https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/8.6.4/jsoneditor.css" -JSON_EDITOR_INIT_JS = "django-jsoneditor/jsoneditor-init.min.js" -JSON_EDITOR_ACE_OPTIONS_JS = "django-jsoneditor/ace_options.min.js" - -# CAPTCHA_IMAGE_SIZE = 300,200 -CAPTCHA_FONT_SIZE = 40 -CAPTCHA_CHALLENGE_FUNCT = "captcha.helpers.random_char_challenge" -CAPTCHA_TEST_MODE = env("CAPTCHA_TEST_MODE") -CAPTCHA_GET_FROM_POOL = True - - -# CAPTCHA_CHALLENGE_FUNCT = 'captcha.helpers.math_challenge' - - -# DEBUG TOOLBAR -def show_ddt(request): # pragma: no-cover - from flags.state import flag_enabled - - if request.path in RegexList(("/tpl/.*", "/api/.*", "/dal/.*")): # pragma: no cache - return False - return flag_enabled("DEVELOP_DEBUG_TOOLBAR", request=request) - - -DEBUG_TOOLBAR_CONFIG = { - "SHOW_TOOLBAR_CALLBACK": show_ddt, - "JQUERY_URL": "", - "INSERT_BEFORE": "", - "SHOW_TEMPLATE_CONTEXT": True, -} INTERNAL_IPS = env.list("INTERNAL_IPS") -DEBUG_TOOLBAR_PANELS = [ - "debug_toolbar.panels.history.HistoryPanel", - # "debug_toolbar.panels.versions.VersionsPanel", - "aurora.ddt_panels.StatePanel", - "aurora.ddt_panels.MigrationPanel", - "debug_toolbar.panels.timer.TimerPanel", - "flags.panels.FlagsPanel", - "flags.panels.FlagChecksPanel", - "debug_toolbar.panels.settings.SettingsPanel", - "debug_toolbar.panels.headers.HeadersPanel", - "debug_toolbar.panels.request.RequestPanel", - "debug_toolbar.panels.sql.SQLPanel", - 
"debug_toolbar.panels.staticfiles.StaticFilesPanel", - "debug_toolbar.panels.templates.TemplatesPanel", - "debug_toolbar.panels.cache.CachePanel", - "debug_toolbar.panels.signals.SignalsPanel", - "debug_toolbar.panels.logging.LoggingPanel", - "debug_toolbar.panels.redirects.RedirectsPanel", - "debug_toolbar.panels.profiling.ProfilingPanel", -] + ROOT_TOKEN = env("ROOT_TOKEN") -CSRF_FAILURE_VIEW = "aurora.web.views.site.error_csrf" +CSRF_FAILURE_VIEW = "aurora.web.views.sites.error_csrf" -# WARNING: Do NOT touch this line before it will reach out production AUTH_USER_MODEL = "auth.User" -# AUTH_USER_MODEL = "security.AuroraUser" - -# Graph API -AZURE_GRAPH_API_BASE_URL = "https://graph.microsoft.com" -AZURE_GRAPH_API_VERSION = "v1.0" -AZURE_TOKEN_URL = "https://login.microsoftonline.com/unicef.org/oauth2/token" - -# Social Auth settings. -SOCIAL_AUTH_BACKEND_NAME = 'macioce' -SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_SECRET = env.str("AZURE_CLIENT_SECRET") -SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_TENANT_ID = env("AZURE_TENANT_ID") -SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_KEY = env.str("AZURE_CLIENT_KEY") -SOCIAL_AUTH_RESOURCE = "https://graph.microsoft.com/" -# SOCIAL_AUTH_POLICY = env("AZURE_POLICY_NAME") -# SOCIAL_AUTH_AUTHORITY_HOST = env("AZURE_AUTHORITY_HOST") -SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = [ - "username", - "first_name", - "last_name", - "email", -] - -SOCIAL_AUTH_JSONFIELD_ENABLED = True -SOCIAL_AUTH_PIPELINE = ( - "aurora.core.authentication.social_details", - "social_core.pipeline.social_auth.social_uid", - "social_core.pipeline.social_auth.auth_allowed", - "social_core.pipeline.social_auth.social_user", - "social_core.pipeline.user.get_username", - "aurora.core.authentication.require_email", - "social_core.pipeline.social_auth.associate_by_email", - "aurora.core.authentication.create_user", - "social_core.pipeline.social_auth.associate_user", - "social_core.pipeline.social_auth.load_extra_data", - "aurora.core.authentication.user_details", - "aurora.core.authentication.redir_to_form", -) -SOCIAL_AUTH_AZUREAD_B2C_OAUTH2_USER_FIELDS = [ - "email", - "fullname", -] - -SOCIAL_AUTH_AZUREAD_B2C_OAUTH2_SCOPE = [ - "openid", - "email", - "profile", -] -SOCIAL_AUTH_SANITIZE_REDIRECTS = True -SOCIAL_AUTH_JWT_LEEWAY = env.int("JWT_LEEWAY", 0) - -# fix admin name LOGIN_URL = "/login" LOGIN_REDIRECT_URL = "/logged-in/" @@ -626,226 +397,13 @@ def show_ddt(request): # pragma: no-cover HTTP2_PRELOAD_HEADERS = True HTTP2_PRESEND_CACHED_HEADERS = True HTTP2_SERVER_PUSH = False -# CSP -SOURCES = ( - "'self'", - "inline", - "unsafe-inline", - "data:", - "blob:", - "'unsafe-inline'", - "localhost:8000", - "unpkg.com", - "browser.sentry-cdn.com", - "cdnjs.cloudflare.com", - "unisitetracker.unicef.io", - "register.unicef.org", - "uni-hope-ukr-sr.azurefd.net", - "uni-hope-ukr-sr-dev.azurefd.net", - "uni-hope-ukr-sr-dev.unitst.org", -) -MIDDLEWARE += [ - "csp.middleware.CSPMiddleware", -] -CSP_DEFAULT_SRC = SOURCES -CSP_FRAME_ANCESTORS = ( - "'self'", -) -# CSP_SCRIPT_SRC = SOURCES -# CSP_STYLE_SRC = ( -# "'self'", -# "'data'", -# "'unsafe-inline'", -# "https://unpkg.com", -# "http://localhost:8000", -# "https://cdnjs.cloudflare.com", -# "http://cdnjs.cloudflare.com", -# -# ) -# CSP_OBJECT_SRC = ("self",) -# CSP_BASE_URI = ("self", "http://localhost:8000",) -# CSP_CONNECT_SRC = ("self",) -# CSP_FONT_SRC = ("self",) -# CSP_FRAME_SRC = ("self",) -# CSP_IMG_SRC = ("self", "data") -# CSP_MANIFEST_SRC = ("self",) -# CSP_MEDIA_SRC = ("self",) -# CSP_REPORT_URI = 
("https://624948b721ea44ac2a6b4de4.endpoint.csper.io/?v=0;",) -# CSP_WORKER_SRC = ("self",) -"""default-src 'self'; -script-src 'report-sample' 'self'; -style-src 'report-sample' 'self'; -object-src 'none'; -base-uri 'self'; -connect-src 'self'; -font-src 'self'; -frame-src 'self'; -img-src 'self'; -manifest-src 'self'; -media-src 'self'; -report-uri https://624948b721ea44ac2a6b4de4.endpoint.csper.io/?v=0; -worker-src 'none'; -""" - -# CSP_INCLUDE_NONCE_IN = env("CSP_INCLUDE_NONCE_IN") -# CSP_REPORT_ONLY = env("CSP_REPORT_ONLY") -# CSP_DEFAULT_SRC = env("CSP_DEFAULT_SRC") -# CSP_SCRIPT_SRC = env("CSP_SCRIPT_SRC") SECURE_HSTS_SECONDS = 60 SECURE_HSTS_INCLUDE_SUBDOMAINS = True -# Add reversion models to admin interface: -ADD_REVERSION_ADMIN = True -# optional settings: -REVERSION_COMPARE_FOREIGN_OBJECTS_AS_ID = False -REVERSION_COMPARE_IGNORE_NOT_REGISTERED = False - -ADMIN_SYNC_CONFIG = env("ADMIN_SYNC_CONFIG") -ADMIN_SYNC_RESPONSE_HEADER = None -# these are actually used only in local development -ADMIN_SYNC_REMOTE_SERVER = env("ADMIN_SYNC_REMOTE_SERVER", default="") -ADMIN_SYNC_REMOTE_ADMIN_URL = env("ADMIN_SYNC_REMOTE_ADMIN_URL", default="") -ADMIN_SYNC_LOCAL_ADMIN_URL = env("ADMIN_SYNC_LOCAL_ADMIN_URL", default="") -# ADMIN_SYNC_USE_REVERSION= - SILENCED_SYSTEM_CHECKS = ["debug_toolbar.W006", "urls.W005", "admin_extra_buttons.PERM"] -DBTEMPLATES_USE_REVERSION = True -DBTEMPLATES_USE_CODEMIRROR = True - -CONCURRENCY_ENABLED = False -STRATEGY_CLASSLOADER = "aurora.core.registry.classloader" MIGRATION_LOCK_KEY = env("MIGRATION_LOCK_KEY") -# for offline forms -DATA_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024 * 10 -SESSION_COOKIE_HTTPONLY = False - -HIJACK_PERMISSION_CHECK = "aurora.administration.hijack.can_impersonate" - -CHANNEL_LAYERS = { - "default": { - "BACKEND": "channels_redis.core.RedisChannelLayer", - "CONFIG": { - "hosts": [env("CHANNEL_LAYER")], - }, - }, -} - -MDEDITOR_CONFIGS = { - "default": { - "width": "100% ", # Custom edit box width - "height": 200, # Custom edit box height - "toolbar": [ - "undo", - "redo", - "|", - "bold", - "del", - "italic", - "quote", - "ucwords", - "uppercase", - "lowercase", - "|", - "h1", - "h2", - "h3", - "h5", - "h6", - "|", - "list-ul", - "list-ol", - "hr", - "|", - "link", - "reference-link", - "image", - "code", - "preformatted-text", - "code-block", - "table", - "datetime", - "emoji", - "html-entities", - "pagebreak", - "goto-line", - "|", - "help", - "info", - "||", - "preview", - "watch", - "fullscreen", - ], # custom edit box toolbar - # image upload format type - # 'upload_image_formats': ["jpg", "jpeg", "gif", "png", "bmp", "webp", "svg"], - # 'image_folder': 'editor', # image save the folder name - "theme": "default", # edit box theme, dark / default - "preview_theme": "default", # Preview area theme, dark / default - "editor_theme": "default", # edit area theme, pastel-on-dark / default - "toolbar_autofixed": False, # Whether the toolbar capitals - "search_replace": True, # Whether to open the search for replacement - "emoji": True, # whether to open the expression function - "tex": True, # whether to open the tex chart function - "flow_chart": True, # whether to open the flow chart function - "sequence": True, # Whether to open the sequence diagram function - "watch": True, # Live preview - "lineWrapping": True, # lineWrapping - "lineNumbers": True, # lineNumbers - "language": "en", # zh / en / es - } -} - -REST_FRAMEWORK = { - "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination", - "DEFAULT_VERSIONING_CLASS": 
"rest_framework.versioning.NamespaceVersioning", - "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",), - "DEFAULT_RENDERER_CLASSES": ( - "rest_framework.renderers.JSONRenderer", - "rest_framework.renderers.BrowsableAPIRenderer", - "rest_framework_datatables.renderers.DatatablesRenderer", - ), - "PAGE_SIZE": 30, - "DEFAULT_AUTHENTICATION_CLASSES": [ - "rest_framework.authentication.BasicAuthentication", - "rest_framework.authentication.SessionAuthentication", - "rest_framework.authentication.TokenAuthentication", - ], - "DEFAULT_PERMISSION_CLASSES": [ - "rest_framework.permissions.DjangoModelPermissions", - ], -} - -FRONT_DOOR_CONFIG = "front_door.conf.DjangoConstance" -FRONT_DOOR_ENABLED = env("FRONT_DOOR_ENABLED") -FRONT_DOOR_ALLOWED_PATHS = env("FRONT_DOOR_ALLOWED_PATHS") -FRONT_DOOR_TOKEN = env("FRONT_DOOR_TOKEN") -FRONT_DOOR_HEADER = "x-aurora" -FRONT_DOOR_COOKIE_NAME = "x-aurora" -FRONT_DOOR_COOKIE_PATTERN = ".*" -# FRONT_DOOR_ERROR_CODE = 404 -# FRONT_DOOR_REDIR_URL = "https://www.sosbob.com/" -FRONT_DOOR_LOG_LEVEL = env("FRONT_DOOR_LOG_LEVEL") # LOG_RULE_FAIL -FRONT_DOOR_RULES = [ - # "front_door.rules.internal_ip", # grant access to settings.INTERNAL_IPS - # "front_door.rules.forbidden_path", # DENY access to FORBIDDEN_PATHS - "front_door.rules.allowed_path", # grant access to ALLOWED_PATHS - "front_door.rules.allowed_ip", # grant access to ALLOWED_IPS - "front_door.rules.special_header", # grant access if request has Header[HEADER] == TOKEN - # "front_door.rules.has_header", # grant access if request has HEADER - "front_door.rules.cookie_value", # grant access if request.COOKIES[COOKIE_NAME] - # "front_door.rules.cookie_exists", # grant access ir COOKIE_NAME in request.COOKIES -] - -TRANSLATOR_SERVICE = env("TRANSLATOR_SERVICE") -AZURE_TRANSLATOR_KEY = env("AZURE_TRANSLATOR_KEY") -AZURE_TRANSLATOR_LOCATION = env("AZURE_TRANSLATOR_LOCATION") - -AZURE_CLIENT_ID = env("AZURE_CLIENT_ID") -AZURE_CLIENT_SECRET = env("AZURE_CLIENT_SECRET") - -MATOMO_SITE = env("MATOMO_SITE", default="https://unisitetracker.unicef.io/") -MATOMO_ID = env("MATOMO_ID", default="N/A") +from .fragments import * # noqa diff --git a/src/aurora/config/urls.py b/src/aurora/config/urls.py index 0c06940e..a2da380b 100644 --- a/src/aurora/config/urls.py +++ b/src/aurora/config/urls.py @@ -1,12 +1,13 @@ -import adminactions.actions as actions -import debug_toolbar from django.conf import settings from django.conf.urls.i18n import i18n_patterns from django.contrib import admin from django.urls import include, path, re_path +from adminactions import actions +import debug_toolbar + from aurora.core.views import service_worker -from aurora.web.views.site import error_404 +from aurora.web.views.sites import error_404 actions.add_to_site(admin.site) @@ -26,6 +27,7 @@ path("i18n/", include("aurora.i18n.urls")), path("__debug__/", include(debug_toolbar.urls)), path(r"serviceworker.js", service_worker, name="serviceworker"), + path(r"sysinfo/", include("django_sysinfo.urls")), ] urlpatterns += i18n_patterns( diff --git a/src/aurora/core/admin/__init__.py b/src/aurora/core/admin/__init__.py index 682e04ae..175af2ec 100644 --- a/src/aurora/core/admin/__init__.py +++ b/src/aurora/core/admin/__init__.py @@ -1,8 +1,8 @@ -from .custom_field import CustomFieldTypeAdmin -from .flex_field import FlexFormFieldAdmin -from .flex_form import FlexFormAdmin -from .formset import FormSetAdmin -from .optionset import OptionSetAdmin -from .organization import OrganizationAdmin -from .project import ProjectAdmin 
-from .validator import ValidatorAdmin +from .custom_field import CustomFieldTypeAdmin # noqa +from .flex_field import FlexFormFieldAdmin # noqa +from .flex_form import FlexFormAdmin # noqa +from .formset import FormSetAdmin # noqa +from .optionset import OptionSetAdmin # noqa +from .organization import OrganizationAdmin # noqa +from .project import ProjectAdmin # noqa +from .validator import ValidatorAdmin # noqa diff --git a/src/aurora/core/admin/base.py b/src/aurora/core/admin/base.py index e2a66fee..eaab9628 100644 --- a/src/aurora/core/admin/base.py +++ b/src/aurora/core/admin/base.py @@ -1,10 +1,11 @@ import logging +from django.conf import settings +from django.core.cache import caches + from admin_extra_buttons.decorators import button from admin_sync.utils import is_local from concurrency.api import disable_concurrency -from django.conf import settings -from django.core.cache import caches from reversion_compare.admin import CompareVersionAdmin from ..utils import is_root diff --git a/src/aurora/core/admin/custom_field.py b/src/aurora/core/admin/custom_field.py index d86fde65..91d21e77 100644 --- a/src/aurora/core/admin/custom_field.py +++ b/src/aurora/core/admin/custom_field.py @@ -1,10 +1,12 @@ import logging -from admin_extra_buttons.decorators import button from django import forms from django.contrib.admin import register from django.core.cache import caches from django.db.models import JSONField +from django.db.models.functions import Collate + +from admin_extra_buttons.decorators import button from jsoneditor.forms import JSONEditor from smart_admin.modeladmin import SmartModelAdmin @@ -23,11 +25,14 @@ class CustomFieldTypeAdmin(SmartModelAdmin): "base_type", "attrs", ) - search_fields = ("name",) + search_fields = ("name_deterministic",) formfield_overrides = { JSONField: {"widget": JSONEditor}, } + def get_queryset(self, request): + return super().get_queryset(request).annotate(name_deterministic=Collate("name", "und-x-icu")) + @button() def test(self, request, pk): ctx = self.get_common_context(request, pk) @@ -38,15 +43,16 @@ def test(self, request, pk): form_class_attrs = { "sample": field, } - formClass = type(forms.Form)("TestForm", (forms.Form,), form_class_attrs) + form_class = type(forms.Form)("TestForm", (forms.Form,), form_class_attrs) if request.method == "POST": - form = formClass(request.POST) + form = form_class(request.POST) if form.is_valid(): self.message_user( - request, f"Form validation success. " f"You have selected: {form.cleaned_data['sample']}" + request, + f"Form validation success. 
You have selected: {form.cleaned_data['sample']}",
                 )
         else:
-            form = formClass()
+            form = form_class()
         ctx["form"] = form
 
         return render(request, "admin/core/customfieldtype/test.html", ctx)
diff --git a/src/aurora/core/admin/field_editor.py b/src/aurora/core/admin/field_editor.py
index 1d9f4fa0..a393f8ce 100644
--- a/src/aurora/core/admin/field_editor.py
+++ b/src/aurora/core/admin/field_editor.py
@@ -1,8 +1,7 @@
 import json
 
-from django.conf import settings
-from typing import Dict
 from django import forms
+from django.conf import settings
 from django.core.cache import caches
 from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
 from django.shortcuts import render
@@ -10,7 +9,7 @@
 from django.utils.functional import cached_property
 
 from aurora.core.fields.widgets import JavascriptEditor
-from aurora.core.forms import VersionMedia, FlexFormBaseForm
+from aurora.core.forms import FlexFormBaseForm, VersionMedia
 from aurora.core.models import FlexFormField, OptionSet
 from aurora.core.utils import merge_data
 
@@ -29,7 +28,6 @@ def __init__(self, *args, **kwargs):
 class FlexFieldAttributesForm(AdvancendAttrsMixin, forms.ModelForm):
     required = forms.BooleanField(widget=forms.CheckboxInput, required=False)
     enabled = forms.BooleanField(widget=forms.CheckboxInput, required=False)
-    # onchange = forms.CharField(widget=JavascriptEditor(toolbar=True), required=False)
 
     def __init__(self, *args, **kwargs):
         kwargs["instance"] = kwargs["field"]
@@ -37,7 +35,15 @@ def __init__(self, *args, **kwargs):
 
     class Meta:
         model = FlexFormField
-        fields = ("field_type", "label", "required", "enabled", "validator", "regex", "validation")
+        fields = (
+            "field_type",
+            "label",
+            "required",
+            "enabled",
+            "validator",
+            "regex",
+            "validation",
+        )
 
 
 class FormFieldAttributesForm(AdvancendAttrsMixin, forms.Form):
@@ -49,30 +55,36 @@ class WidgetAttributesForm(AdvancendAttrsMixin, forms.Form):
     css_class = forms.CharField(label="Field class", required=False, help_text="Input CSS class to apply (will")
     extra_classes = forms.CharField(required=False, help_text="Input CSS classes to add input")
     fieldset = forms.CharField(label="Fieldset class", required=False, help_text="Fieldset CSS class to apply")
-    # onchange = forms.CharField(widget=JavascriptEditor(toolbar=True), required=False)
-    # onblur = forms.CharField(widget=JavascriptEditor(toolbar=True), required=False)
-    # onkeyup = forms.CharField(widget=JavascriptEditor(toolbar=True), required=False)
 
 
 def get_datasources():
     v = OptionSet.objects.order_by("name").values_list("name", flat=True)
-    return [("", "")] + list(zip(v, v))
+    return [("", "")] + list(zip(v, v, strict=True))
 
 
 class SmartAttributesForm(AdvancendAttrsMixin, forms.Form):
-    question = forms.CharField(required=False, help_text="If set, user must check related box to display the field")
+    question = forms.CharField(
+        required=False,
+        help_text="If set, user must check related box to display the field",
+    )
     question_onchange = forms.CharField(
-        widget=forms.Textarea, required=False, help_text="Js to tigger on 'question' check/uncheck "
+        widget=forms.Textarea,
+        required=False,
+        help_text="Js to trigger on 'question' check/uncheck",
+    )
     hint = forms.CharField(required=False, help_text="Text to display above the input")
     description = forms.CharField(required=False, help_text="Text to display below the input")
-    datasource = forms.ChoiceField(choices=get_datasources, required=False, help_text="Datasource name for ajax field")
+    datasource = forms.ChoiceField(
+        choices=get_datasources,
+        
required=False, + help_text="Datasource name for ajax field", + ) parent_datasource = forms.ChoiceField( - choices=get_datasources, required=False, help_text="Parent Datasource name for ajax field" + choices=get_datasources, + required=False, + help_text="Parent Datasource name for ajax field", ) choices = forms.JSONField(required=False) - # onchange = forms.CharField(widget=forms.Textarea, required=False, help_text="Javascript onchange event") - # onblur = forms.CharField(widget=forms.Textarea, required=False, help_text="Javascript onblur event") visible = forms.BooleanField(required=False, help_text="Hide/Show field") @@ -95,7 +107,10 @@ class EventForm(AdvancendAttrsMixin, forms.Form): DEFAULTS = { - "css": {"question": "cursor-pointer", "label": "block uppercase tracking-wide text-gray-700 font-bold mb-2"}, + "css": { + "question": "cursor-pointer", + "label": "block uppercase tracking-wide text-gray-700 font-bold mb-2", + }, } @@ -132,13 +147,13 @@ def patched_field(self): fld = self.field if config := cache.get(self.cache_key, None): forms = self.get_forms(config) - fieldForm = forms.pop("field", None) - if fieldForm.is_valid(): - for k, v in fieldForm.cleaned_data.items(): + field_form = forms.pop("field", None) + if field_form.is_valid(): + for k, v in field_form.cleaned_data.items(): setattr(fld, k, v) for prefix, frm in forms.items(): frm.is_valid() - merged = merge_data(fld.advanced, {**{prefix: frm.cleaned_data}}) + merged = merge_data(fld.advanced, {prefix: frm.cleaned_data}) fld.advanced = merged return fld @@ -151,7 +166,7 @@ def get_configuration(self): return HttpResponse(rendered, content_type="text/plain") def get_code(self): - from bs4 import BeautifulSoup as bs + from bs4 import BeautifulSoup from bs4 import formatter from pygments import highlight from pygments.formatters.html import HtmlFormatter @@ -166,16 +181,20 @@ def get_code(self): ctx["form"] = form_class() ctx["instance"] = instance code = Template( - "{% for field in form %}{% spaceless %}" - '{% include "smart/_fieldset.html" %}{% endspaceless %}{% endfor %}' + '{% for field in form %}{% spaceless %}{% include "smart/_fieldset.html" %}{% endspaceless %}{% endfor %}' ).render(Context(ctx)) formatter = formatter.HTMLFormatter(indent=2) - soup = bs(code) - prettyHTML = soup.prettify(formatter=formatter) + soup = BeautifulSoup(code) + pretty_html = soup.prettify(formatter=formatter) formatter = HtmlFormatter(style="default", full=True) - ctx["code"] = highlight(prettyHTML, HtmlLexer(), formatter) - return render(self.request, "admin/core/flexformfield/field_editor/code.html", ctx, content_type="text/html") + ctx["code"] = highlight(pretty_html, HtmlLexer(), formatter) + return render( + self.request, + "admin/core/flexformfield/field_editor/code.html", + ctx, + content_type="text/html", + ) def render(self): instance = self.patched_field.get_instance() @@ -196,13 +215,16 @@ def render(self): return render(self.request, "admin/core/flexformfield/field_editor/preview.html", ctx) - def get_forms(self, data=None) -> Dict: + def get_forms(self, data=None) -> dict: if data: return {prefix: Form(data, prefix=prefix, field=self.field) for prefix, Form in self.FORMS.items()} if self.request.method == "POST": return { prefix: Form( - self.request.POST, prefix=prefix, field=self.field, initial=get_initial(self.field, prefix) + self.request.POST, + prefix=prefix, + field=self.field, + initial=get_initial(self.field, prefix), ) for prefix, Form in self.FORMS.items() } @@ -213,7 +235,7 @@ def get_forms(self, data=None) -> 
Dict: def refresh(self): forms = self.get_forms() - if all(map(lambda f: f.is_valid(), forms.values())): + if all(f.is_valid() for f in forms.values()): data = self.request.POST.dict() data.pop("csrfmiddlewaretoken") cache.set(self.cache_key, data) @@ -247,6 +269,7 @@ def get(self, request, pk): def post(self, request, pk): forms = self.get_forms() - if all(map(lambda f: f.is_valid(), forms.values())): + if all(f.is_valid() for f in forms.values()): self.patched_field.save() return HttpResponseRedirect(".") + return None diff --git a/src/aurora/core/admin/filters.py b/src/aurora/core/admin/filters.py index d6f50994..1f1def25 100644 --- a/src/aurora/core/admin/filters.py +++ b/src/aurora/core/admin/filters.py @@ -1,13 +1,14 @@ import logging -from adminfilters.autocomplete import AutoCompleteFilter -from adminfilters.combo import ChoicesFieldComboFilter, RelatedFieldComboFilter from django.contrib.admin.options import IncorrectLookupParameters from django.core.cache import caches from django.core.exceptions import ValidationError from django.db.models import Q from django.urls import reverse +from adminfilters.autocomplete import AutoCompleteFilter +from adminfilters.combo import ChoicesFieldComboFilter, RelatedFieldComboFilter + from ...administration.filters import BaseAutoCompleteFilter logger = logging.getLogger(__name__) @@ -26,7 +27,7 @@ class Select2RelatedFieldComboFilter(RelatedFieldComboFilter): class ProjectFilter(AutoCompleteFilter): fk_name = "project__organization__exact" - def __init__(self, field, request, params, model, model_admin, field_path): + def __init__(self, field, request, params, model, model_admin, field_path): # noqa self.request = request super().__init__(field, request, params, model, model_admin, field_path) @@ -46,7 +47,6 @@ def has_output(self): return "project__exact" in self.request.GET def queryset(self, request, queryset): - # {'registration__exact': '30'} if not self.used_parameters: return queryset try: diff --git a/src/aurora/core/admin/flex_field.py b/src/aurora/core/admin/flex_field.py index 66ed4ef6..83a95bc5 100644 --- a/src/aurora/core/admin/flex_field.py +++ b/src/aurora/core/admin/flex_field.py @@ -1,14 +1,16 @@ import logging -from admin_extra_buttons.decorators import button, view -from admin_ordering.admin import OrderableAdmin -from adminfilters.autocomplete import AutoCompleteFilter -from adminfilters.querystring import QueryStringFilter from django import forms from django.contrib import messages from django.contrib.admin import register from django.core.cache import caches from django.db.models import JSONField +from django.db.models.functions import Collate + +from admin_extra_buttons.decorators import button, view +from admin_ordering.admin import OrderableAdmin +from adminfilters.autocomplete import AutoCompleteFilter +from adminfilters.querystring import QueryStringFilter from jsoneditor.forms import JSONEditor from smart_admin.modeladmin import SmartModelAdmin @@ -29,7 +31,20 @@ class FlexFormFieldForm(forms.ModelForm): class Meta: model = FlexFormField - exclude = () + fields = ( + "version", + "flex_form", + "label", + "name", + "field_type", + "choices", + "required", + "enabled", + "validator", + "validation", + "regex", + "advanced", + ) def clean(self): ret = super().clean() @@ -41,7 +56,7 @@ def clean(self): @register(FlexFormField) class FlexFormFieldAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, OrderableAdmin, SmartModelAdmin): - search_fields = ("name", "label") + search_fields = ("name_deterministic", "label") 
list_display = ("label", "name", "flex_form", "field_type", "required", "enabled") list_editable = ["required", "enabled"] list_filter = ( @@ -60,20 +75,18 @@ class FlexFormFieldAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, Orde readonly_fields = ("version", "last_update_date") def get_queryset(self, request): - return super().get_queryset(request).select_related("flex_form") + return ( + super() + .get_queryset(request) + .annotate(name_deterministic=Collate("name", "und-x-icu")) + .select_related("flex_form") + ) - # change_list_template = "reversion/change_list.html" def get_readonly_fields(self, request, obj=None): - if is_root(request): - return [] - else: - return super().get_readonly_fields(request, obj) + return super().get_readonly_fields(request, obj) if is_root(request) else [] def field_type(self, obj): - if obj.field_type: - return obj.field_type.__name__ - else: - return "[[ removed ]]" + return obj.field_type.__name__ if obj.field_type else "[[ removed ]]" def formfield_for_dbfield(self, db_field, request, **kwargs): if db_field.name == "advanced": @@ -98,8 +111,7 @@ def field_editor(self, request, pk): ret = self.editor.post(request, pk) self.message_user(request, "Saved", messages.SUCCESS) return ret - else: - return self.editor.get(request, pk) + return self.editor.get(request, pk) @view() def widget_attrs(self, request, pk): @@ -128,11 +140,7 @@ def test(self, request, pk): fld = ctx["original"] instance = fld.get_instance() ctx["debug_info"] = { - # "widget": getattr(instance, "widget", None), "field_kwargs": fld.get_field_kwargs(), - # "options": getattr(instance, "options", None), - # "choices": getattr(instance, "choices", None), - # "widget_attrs": instance.widget_attrs(instance.widget), } form_class_attrs = { "sample": instance, @@ -145,7 +153,8 @@ def test(self, request, pk): if form.is_valid(): ctx["debug_info"]["cleaned_data"] = form.cleaned_data self.message_user( - request, f"Form validation success. You have selected: {form.cleaned_data['sample']}" + request, + f"Form validation success. 
You have selected: {form.cleaned_data['sample']}",
                     )
             else:
                 form = form_class()
diff --git a/src/aurora/core/admin/flex_form.py b/src/aurora/core/admin/flex_form.py
index ef785555..ade4d9fa 100644
--- a/src/aurora/core/admin/flex_form.py
+++ b/src/aurora/core/admin/flex_form.py
@@ -1,16 +1,17 @@
 import logging
 
-from admin_extra_buttons.decorators import button, view
-from admin_ordering.admin import OrderableAdmin
-from adminfilters.autocomplete import AutoCompleteFilter
-from adminfilters.querystring import QueryStringFilter
 from django import forms
 from django.contrib import messages
 from django.contrib.admin import TabularInline, register
 from django.core.cache import caches
+from django.db.models.functions import Collate
+
+from admin_extra_buttons.decorators import button, view
+from admin_ordering.admin import OrderableAdmin
+from adminfilters.autocomplete import AutoCompleteFilter
+from adminfilters.querystring import QueryStringFilter
 from smart_admin.modeladmin import SmartModelAdmin
 
-from ...administration.mixin import LoadDumpMixin
 from ..admin_sync import SyncMixin
 from ..models import FlexForm, FlexFormField, FormSet
 from ..utils import render
@@ -39,15 +40,31 @@ def formfield_for_dbfield(self, db_field, request, **kwargs):
 class FlexFormFieldFormInline(forms.ModelForm):
     class Meta:
         model = FlexFormField
-        exclude = ()
+        fields = (
+            "version",
+            "flex_form",
+            "label",
+            "name",
+            "field_type",
+            "choices",
+            "required",
+            "enabled",
+            "validator",
+            "validation",
+            "regex",
+            "advanced",
+        )
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if self.instance.pk:
-            self.fields["name"].widget.attrs = {"readonly": True, "tyle": "background-color:#f8f8f8;border:none"}
+            self.fields["name"].widget.attrs = {
+                "readonly": True,
+                "style": "background-color:#f8f8f8;border:none",
+            }
 
 
-class FlexFormFieldInline(LoadDumpMixin, OrderableAdmin, TabularInline):
+class FlexFormFieldInline(OrderableAdmin, TabularInline):
     template = "admin/core/flexformfield/tabular.html"
     model = FlexFormField
     form = FlexFormFieldFormInline
@@ -58,9 +75,6 @@ class FlexFormFieldInline(LoadDumpMixin, OrderableAdmin, TabularInline):
     ordering_field_hide_input = True
 
     def formfield_for_choice_field(self, db_field, request, **kwargs):
-        # if db_field.name == "field_type":
-        #     kwargs["widget"] = Select2Widget()
-        #     return db_field.formfield(**kwargs)
         return super().formfield_for_choice_field(db_field, request, **kwargs)
 
 
@@ -85,7 +99,7 @@ class FlexFormAdmin(SyncMixin, ConcurrencyVersionAdmin, SmartModelAdmin):
         ("formset", UsedInRFormset),
         ("formset__parent", UsedInRFormset),
     )
-    search_fields = ("name",)
+    search_fields = ("name_deterministic",)
     readonly_fields = ("version", "last_update_date")
     autocomplete_fields = ("validator", "project")
     ordering = ("name",)
@@ -95,6 +109,7 @@ def get_queryset(self, request):
         return (
             super()
             .get_queryset(request)
+            .annotate(name_deterministic=Collate("name", "und-x-icu"))
             .prefetch_related("registration_set")
             .select_related(
                 "project",
@@ -130,8 +145,7 @@ def form_editor(self, request, pk):
             ret = self.editor.post(request, pk)
             self.message_user(request, "Saved", messages.SUCCESS)
             return ret
-        else:
-            return self.editor.get(request, pk)
+        return self.editor.get(request, pk)
 
     @view()
     def widget_attrs(self, request, pk):
@@ -166,81 +180,3 @@ def test(self, request, pk):
             form = form_class(initial=self.object.get_initial())
         ctx["form"] = form
         return render(request, "admin/core/flexform/test.html", ctx)
-
-    # @view(http_basic_auth=True, permission=lambda request, 
obj: request.user.is_superuser) - # def export(self, request): - # try: - # frm = SyncConfigForm(request.GET) - # if frm.is_valid(): - # apps = frm.cleaned_data["apps"] - # buf = io.StringIO() - # call_command( - # "dumpdata", - # *apps, - # stdout=buf, - # exclude=["registration.Record"], - # use_natural_foreign_keys=True, - # use_natural_primary_keys=True, - # ) - # return JsonResponse(json.loads(buf.getvalue()), safe=False) - # else: - # return JsonResponse(frm.errors, status=400) - # except Exception as e: - # logger.exception(e) - # return JsonResponse({}, status=400) - - # def _get_signed_cookie(self, request, form): - # signer = Signer(request.user.password) - # return signer.sign_object(form.cleaned_data) - # - # def _get_saved_credentials(self, request): - # try: - # signer = Signer(request.user.password) - # obj: dict = signer.unsign_object(request.COOKIES.get(self.SYNC_COOKIE, {})) - # return obj - # except BadSignature: - # return {} - - # @button(label="Import") - # def _import(self, request): - # ctx = self.get_common_context(request, title="Import") - # cookies = {} - # if request.method == "POST": - # form = SyncForm(request.POST) - # if form.is_valid(): - # try: - # auth = HTTPBasicAuth(form.cleaned_data["username"], form.cleaned_data["password"]) - # if form.cleaned_data["remember"]: - # cookies = {self.SYNC_COOKIE: self._get_signed_cookie(request, form)} - # else: - # cookies = {self.SYNC_COOKIE: ""} - # url = f"{form.cleaned_data['host']}core/flexform/export/?" - # for app in form.cleaned_data["apps"]: - # url += f"apps={app}&" - # if not url.startswith("http"): - # url = f"https://{url}" - # - # workdir = Path(".").absolute() - # out = io.StringIO() - # with requests.get(url, stream=True, auth=auth) as res: - # if res.status_code != 200: - # raise Exception(str(res)) - # ctx["url"] = url - # with tempfile.NamedTemporaryFile( - # dir=workdir, prefix="~SYNC", suffix=".json", delete=not settings.DEBUG - # ) as fdst: - # fdst.write(res.content) - # with disable_concurrency(): - # fixture = (workdir / fdst.name).absolute() - # call_command("loaddata", fixture, stdout=out, verbosity=3) - # - # message = out.getvalue() - # self.message_user(request, message) - # ctx["res"] = res - # except (Exception, JSONDecodeError) as e: - # logger.exception(e) - # self.message_error_to_user(request, e) - # else: - # form = SyncForm(initial=self._get_saved_credentials(request)) - # ctx["form"] = form - # return render(request, "admin/core/flexform/import.html", ctx, cookies=cookies) diff --git a/src/aurora/core/admin/form_editor.py b/src/aurora/core/admin/form_editor.py index 486f86e5..9c28685c 100644 --- a/src/aurora/core/admin/form_editor.py +++ b/src/aurora/core/admin/form_editor.py @@ -1,5 +1,4 @@ import json -from typing import Dict from django import forms from django.core.cache import caches @@ -21,7 +20,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) -# class FlexFormAttributesForm(AdvancendAttrsMixin, forms.ModelForm): class Meta: model = FlexForm @@ -37,27 +35,16 @@ class EventForm(AdvancendAttrsMixin, forms.Form): validation = forms.CharField(widget=JavascriptEditor(toolbar=True), required=False) -DEFAULTS = { - # "css": {"question": "cursor-pointer", "label": "block uppercase tracking-wide text-gray-700 font-bold mb-2"}, - # "css": {"question": "cursor-pointer", "label": "block uppercase tracking-wide text-gray-700 font-bold mb-2"}, -} +DEFAULTS = {} def get_initial(form, prefix): - base = DEFAULTS.get(prefix, {}) - # for k, v in 
form.advanced.get(prefix, {}).items(): - # if v: - # base[k] = v - return base + return DEFAULTS.get(prefix, {}) class FormEditor: FORMS = { "frm": FlexFormAttributesForm, - # "kwargs": FormFieldAttributesForm, - # "widget": WidgetAttributesForm, - # "smart": SmartAttributesForm, - # "css": CssForm, "events": EventForm, } @@ -73,18 +60,7 @@ def flex_form(self): @cached_property def patched_form(self): - fld = self.flex_form.get_form_class() - # if config := cache.get(self.cache_key, None): - # forms = self.get_forms(config) - # fieldForm = forms.pop("field", None) - # if fieldForm.is_valid(): - # for k, v in fieldForm.cleaned_data.items(): - # setattr(fld, k, v) - # for prefix, frm in forms.items(): - # frm.is_valid() - # merged = merge_data(fld.advanced, {**{prefix: frm.cleaned_data}}) - # fld.advanced = merged - return fld + return self.flex_form.get_form_class() def patch(self, request, pk): pass @@ -95,37 +71,32 @@ def get_configuration(self): return HttpResponse(rendered, content_type="text/plain") def get_code(self): - from bs4 import BeautifulSoup as bs + from bs4 import BeautifulSoup from bs4 import formatter from pygments import highlight from pygments.formatters.html import HtmlFormatter from pygments.lexers import HtmlLexer instance = self.patched_form() - # form_class_attrs = { - # self.field.name: instance, - # } - # form_class = type(forms.Form)("TestForm", (forms.Form,), form_class_attrs) ctx = self.get_context(self.request) ctx["form"] = self.flex_form.get_form_class() ctx["instance"] = instance - # code = Template( - # "{{ form }}" - # ).render(Context(ctx)) code = get_template("smart/_form.html").render(ctx) formatter = formatter.HTMLFormatter(indent=2) - soup = bs(code) - prettyHTML = soup.prettify(formatter=formatter) + soup = BeautifulSoup(code) + pretty_html = soup.prettify(formatter=formatter) formatter = HtmlFormatter(style="default", full=True) - ctx["code"] = highlight(prettyHTML, HtmlLexer(), formatter) - return render(self.request, "admin/core/flexformfield/field_editor/code.html", ctx, content_type="text/html") + ctx["code"] = highlight(pretty_html, HtmlLexer(), formatter) + return render( + self.request, + "admin/core/flexformfield/field_editor/code.html", + ctx, + content_type="text/html", + ) def render(self): instance = self.patched_form - # form_class_attrs = { - # 'fo': instance, - # } form_class = self.flex_form.get_form_class() ctx = self.get_context(self.request) if self.request.method == "POST": @@ -140,24 +111,31 @@ def render(self): return render(self.request, "admin/core/flexform/form_editor/preview.html", ctx) - def get_forms(self, data=None) -> Dict: + def get_forms(self, data=None) -> dict: if data: return {prefix: Form(data, prefix=prefix, form=self.flex_form) for prefix, Form in self.FORMS.items()} if self.request.method == "POST": return { prefix: Form( - self.request.POST, prefix=prefix, form=self.flex_form, initial=get_initial(self.flex_form, prefix) + self.request.POST, + prefix=prefix, + form=self.flex_form, + initial=get_initial(self.flex_form, prefix), ) for prefix, Form in self.FORMS.items() } return { - prefix: Form(prefix=prefix, form=self.flex_form, initial=get_initial(self.flex_form, prefix)) + prefix: Form( + prefix=prefix, + form=self.flex_form, + initial=get_initial(self.flex_form, prefix), + ) for prefix, Form in self.FORMS.items() } def refresh(self): forms = self.get_forms() - if all(map(lambda f: f.is_valid(), forms.values())): + if all(f.is_valid() for f in forms.values()): data = self.request.POST.dict() 
data.pop("csrfmiddlewaretoken") cache.set(self.cache_key, data) @@ -181,6 +159,6 @@ def get(self, request, pk): def post(self, request, pk): forms = self.get_forms() - if all(map(lambda f: f.is_valid(), forms.values())): - # self.patched_f.save() + if all(f.is_valid() for f in forms.values()): return HttpResponseRedirect(".") + return None diff --git a/src/aurora/core/admin/formset.py b/src/aurora/core/admin/formset.py index ec5f2ea2..f390ead9 100644 --- a/src/aurora/core/admin/formset.py +++ b/src/aurora/core/admin/formset.py @@ -1,9 +1,10 @@ import logging -from adminfilters.autocomplete import AutoCompleteFilter from django.contrib.admin import register from django.core.cache import caches from django.db.models import JSONField + +from adminfilters.autocomplete import AutoCompleteFilter from jsoneditor.forms import JSONEditor from smart_admin.modeladmin import SmartModelAdmin diff --git a/src/aurora/core/admin/mixin.py b/src/aurora/core/admin/mixin.py deleted file mode 100644 index 6e93f996..00000000 --- a/src/aurora/core/admin/mixin.py +++ /dev/null @@ -1,682 +0,0 @@ -import logging - -from admin_extra_buttons.decorators import button -from admin_sync.utils import is_local -from concurrency.api import disable_concurrency -from django import forms -from django.conf import settings -from django.core.cache import caches -from reversion_compare.admin import CompareVersionAdmin -from ..utils import is_root - -logger = logging.getLogger(__name__) - -cache = caches["default"] - - -class ConcurrencyVersionAdmin(CompareVersionAdmin): - change_list_template = "admin_extra_buttons/change_list.html" - - @button(label="Recover deleted") - def _recoverlist_view(self, request): - return super().recoverlist_view(request) - - def reversion_register(self, model, **options): - options["exclude"] = ("version",) - super().reversion_register(model, **options) - - def revision_view(self, request, object_id, version_id, extra_context=None): - with disable_concurrency(): - return super().revision_view(request, object_id, version_id, extra_context) - - def recover_view(self, request, version_id, extra_context=None): - with disable_concurrency(): - return super().recover_view(request, version_id, extra_context) - - def has_change_permission(self, request, obj=None): - orig = super().has_change_permission(request, obj) - return orig and (settings.DEBUG or is_root(request) or is_local(request)) - - -# -# class Select2FieldComboFilter(ChoicesFieldComboFilter): -# template = "adminfilters/select2.html" -# -# -# class Select2RelatedFieldComboFilter(RelatedFieldComboFilter): -# template = "adminfilters/select2.html" -# -# -# class ValidatorTestForm(forms.Form): -# code = forms.CharField( -# widget=JavascriptEditor, -# ) -# input = forms.CharField(widget=JavascriptEditor(toolbar=False), required=False) - -# -# @register(Organization) -# class OrganizationAdmin(SyncMixin, MPTTModelAdmin): -# list_display = ("name",) -# mptt_level_indent = 20 -# mptt_indent_field = "name" -# search_fields = ("name",) -# protocol_class = AuroraSyncOrganizationProtocol -# change_list_template = "admin/core/organization/change_list.html" -# -# def admin_sync_show_inspect(self): -# return True -# -# def get_readonly_fields(self, request, obj=None): -# ro = super().get_readonly_fields(request, obj) -# if obj and obj.pk: -# ro = list(ro) + ["slug"] -# return ro - -# -# @register(Project) -# class ProjectAdmin(SyncMixin, MPTTModelAdmin): -# list_display = ("name",) -# list_filter = ("organization",) -# mptt_level_indent = 20 -# 
mptt_indent_field = "name" -# search_fields = ("name",) -# protocol_class = AuroraSyncProjectProtocol -# autocomplete_fields = "parent, " -# -# def get_search_results(self, request, queryset, search_term): -# queryset, may_have_duplicates = super().get_search_results(request, queryset, search_term) -# if "oid" in request.GET: -# queryset = queryset.filter(organization__id=request.GET["oid"]) -# return queryset, may_have_duplicates -# -# def get_readonly_fields(self, request, obj=None): -# ro = super().get_readonly_fields(request, obj) -# if obj and obj.pk: -# ro = list(ro) + ["slug"] -# return ro -# -# -# @register(Validator) -# class ValidatorAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, SmartModelAdmin): -# form = ValidatorForm -# list_editable = ("trace", "active", "draft") -# list_display = ("label", "name", "target", "used_by", "trace", "active", "draft") -# list_filter = ("target", "active", "draft", "trace") -# readonly_fields = ("version", "last_update_date") -# search_fields = ("name",) -# DEFAULTS = { -# Validator.FORM: {}, # cleaned data -# Validator.FIELD: "", # field value -# Validator.SCRIPT: "", # field value -# Validator.MODULE: [{}], -# Validator.FORMSET: {"total_form_count": 2, "errors": {}, "non_form_errors": {}, "cleaned_data": []}, -# } -# # change_list_template = "reversion/change_list.html" -# object_history_template = "reversion-compare/object_history.html" -# change_form_template = None -# inlines = [] -# -# def save_model(self, request, obj, form, change): -# super().save_model(request, obj, form, change) -# cache.set(f"validator-{request.user.pk}-{obj.pk}-status", obj.STATUS_UNKNOWN) -# -# def used_by(self, obj): -# if obj.target == Validator.FORM: -# return ", ".join(obj.flexform_set.values_list("name", flat=True)) -# elif obj.target == Validator.FIELD: -# return ", ".join(obj.flexformfield_set.values_list("name", flat=True)) -# elif obj.target == Validator.FORMSET: -# return ", ".join(obj.formset_set.values_list("name", flat=True)) -# elif obj.target == Validator.MODULE: -# return ", ".join(obj.validator_for.values_list("name", flat=True)) -# elif obj.target == Validator.SCRIPT: -# return ", ".join(obj.script_for.values_list("name", flat=True)) -# -# @button() -# def test(self, request, pk): -# ctx = self.get_common_context(request, pk) -# original = ctx["original"] -# stored = cache.get(f"validator-{request.user.pk}-{original.pk}-payload") -# ctx["traced"] = stored -# ctx["title"] = f"Test {original.target} validator: {original.name}" -# if stored: -# param = json.loads(stored) -# else: -# param = self.DEFAULTS[original.target] -# -# if request.method == "POST": -# form = ValidatorTestForm(request.POST) -# if form.is_valid(): -# self.object.code = form.cleaned_data["code"] -# self.object.save() -# # return HttpResponseRedirect("..") -# else: -# form = ValidatorTestForm( -# initial={"code": self.object.code, "input": original.jspickle(param)}, -# ) -# -# ctx["jslib"] = Validator.LIB -# ctx["is_script"] = self.object.target in [Validator.SCRIPT] -# ctx["is_validator"] = self.object.target not in [Validator.SCRIPT] -# ctx["form"] = form -# return render(request, "admin/core/validator/test.html", ctx) -# -# -# @register(FormSet) -# class FormSetAdmin(LoadDumpMixin, SyncMixin, SmartModelAdmin): -# list_display = ( -# "name", -# "title", -# "parent", -# "flex_form", -# "enabled", -# "validator", -# "min_num", -# "max_num", -# "extra", -# "dynamic", -# ) -# search_fields = ("name", "title") -# list_editable = ("enabled",) -# readonly_fields = ("version", 
"last_update_date") -# list_filter = ( -# ("parent", AutoCompleteFilter), -# ("flex_form", AutoCompleteFilter), -# ) -# formfield_overrides = { -# JSONField: {"widget": JSONEditor}, -# } -# -# def get_search_results(self, request, queryset, search_term): -# queryset, may_have_duplicates = super().get_search_results(request, queryset, search_term) -# if "oid" in request.GET: -# queryset = queryset.filter(flex_form__organization__id=request.GET["oid"]) -# return queryset, may_have_duplicates -# -# -# class FormSetInline(OrderableAdmin, TabularInline): -# model = FormSet -# fk_name = "parent" -# extra = 0 -# fields = ("name", "flex_form", "extra", "max_num", "min_num", "ordering") -# show_change_link = True -# ordering_field = "ordering" -# ordering_field_hide_input = True -# -# def formfield_for_dbfield(self, db_field, request, **kwargs): -# return super().formfield_for_dbfield(db_field, request, **kwargs) -# -# -# class FlexFormFieldFormInline(forms.ModelForm): -# class Meta: -# model = FlexFormField -# exclude = () -# -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# if self.instance.pk: -# self.fields["name"].widget.attrs = {"readonly": True, "tyle": "background-color:#f8f8f8;border:none"} -# -# -# class FlexFormFieldForm(forms.ModelForm): -# class Meta: -# model = FlexFormField -# exclude = () -# -# def clean(self): -# ret = super().clean() -# ret.setdefault("advanced", {}) -# dict_setdefault(ret["advanced"], FlexFormField.FLEX_FIELD_DEFAULT_ATTRS) -# dict_setdefault(ret["advanced"], {"kwargs": FIELD_KWARGS.get(ret["field_type"], {})}) -# return ret -# -# -# @register(FlexFormField) -# class FlexFormFieldAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, OrderableAdmin, SmartModelAdmin): -# search_fields = ("name", "label") -# list_display = ("label", "name", "flex_form", "field_type", "required", "enabled") -# list_editable = ["required", "enabled"] -# list_filter = ( -# ("flex_form", AutoCompleteFilter), -# ("field_type", Select2FieldComboFilter), -# QueryStringFilter, -# ) -# autocomplete_fields = ("flex_form", "validator") -# save_as = True -# formfield_overrides = { -# JSONField: {"widget": JSONEditor}, -# } -# form = FlexFormFieldForm -# ordering_field = "ordering" -# order = "ordering" -# readonly_fields = ("version", "last_update_date") -# -# def get_queryset(self, request): -# return super().get_queryset(request).select_related("flex_form") -# -# # change_list_template = "reversion/change_list.html" -# def get_readonly_fields(self, request, obj=None): -# if is_root(request): -# return [] -# else: -# return super().get_readonly_fields(request, obj) -# -# def field_type(self, obj): -# if obj.field_type: -# return obj.field_type.__name__ -# else: -# return "[[ removed ]]" -# -# def formfield_for_dbfield(self, db_field, request, **kwargs): -# if db_field.name == "advanced": -# kwargs["widget"] = JSONEditor() -# return super().formfield_for_dbfield(db_field, request, **kwargs) -# -# def formfield_for_choice_field(self, db_field, request, **kwargs): -# if db_field.name == "field_type": -# kwargs["widget"] = Select2Widget() -# return db_field.formfield(**kwargs) -# return super().formfield_for_choice_field(db_field, request, **kwargs) -# -# def get_changeform_initial_data(self, request): -# initial = super().get_changeform_initial_data(request) -# initial.setdefault("advanced", FlexFormField.FLEX_FIELD_DEFAULT_ATTRS) -# return initial -# -# @button(label="editor") -# def field_editor(self, request, pk): -# self.editor = FieldEditor(self, request, pk) 
-# if request.method == "POST": -# ret = self.editor.post(request, pk) -# self.message_user(request, "Saved", messages.SUCCESS) -# return ret -# else: -# return self.editor.get(request, pk) -# -# @view() -# def widget_attrs(self, request, pk): -# editor = FieldEditor(self, request, pk) -# return editor.get_configuration() -# -# @view() -# def widget_refresh(self, request, pk): -# editor = FieldEditor(self, request, pk) -# return editor.refresh() -# -# @view() -# def widget_code(self, request, pk): -# editor = FieldEditor(self, request, pk) -# return editor.get_code() -# -# @view() -# def widget_display(self, request, pk): -# editor = FieldEditor(self, request, pk) -# return editor.render() -# -# @button() -# def test(self, request, pk): -# ctx = self.get_common_context(request, pk) -# try: -# fld = ctx["original"] -# instance = fld.get_instance() -# ctx["debug_info"] = { -# # "widget": getattr(instance, "widget", None), -# "field_kwargs": fld.get_field_kwargs(), -# # "options": getattr(instance, "options", None), -# # "choices": getattr(instance, "choices", None), -# # "widget_attrs": instance.widget_attrs(instance.widget), -# } -# form_class_attrs = { -# "sample": instance, -# } -# form_class = type(forms.Form)("TestForm", (forms.Form,), form_class_attrs) -# -# if request.method == "POST": -# form = form_class(request.POST) -# -# if form.is_valid(): -# ctx["debug_info"]["cleaned_data"] = form.cleaned_data -# self.message_user( -# request, f"Form validation success. You have selected: {form.cleaned_data['sample']}" -# ) -# else: -# form = form_class() -# ctx["form"] = form -# ctx["instance"] = instance -# except Exception as e: -# logger.exception(e) -# ctx["error"] = e -# raise -# -# return render(request, "admin/core/flexformfield/test.html", ctx) - -# -# class FlexFormFieldInline(LoadDumpMixin, OrderableAdmin, TabularInline): -# template = "admin/core/flexformfield/tabular.html" -# model = FlexFormField -# form = FlexFormFieldFormInline -# fields = ("ordering", "label", "name", "required", "enabled", "field_type") -# show_change_link = True -# extra = 0 -# ordering_field = "ordering" -# ordering_field_hide_input = True -# -# def formfield_for_choice_field(self, db_field, request, **kwargs): -# # if db_field.name == "field_type": -# # kwargs["widget"] = Select2Widget() -# # return db_field.formfield(**kwargs) -# return super().formfield_for_choice_field(db_field, request, **kwargs) -# - - -class SyncConfigForm(forms.Form): - APPS = ("core", "registration") - apps = forms.MultipleChoiceField(choices=zip(APPS, APPS), widget=forms.CheckboxSelectMultiple()) - - -class SyncForm(SyncConfigForm): - host = forms.CharField() - username = forms.CharField() - password = forms.CharField(widget=forms.PasswordInput) - remember = forms.BooleanField(label="Remember me", required=False) - - -# -# class ProjectFilter(AutoCompleteFilter): -# fk_name = "project__organization__exact" -# -# def __init__(self, field, request, params, model, model_admin, field_path): -# self.request = request -# super().__init__(field, request, params, model, model_admin, field_path) -# -# def has_output(self): -# return "project__organization__exact" in self.request.GET -# -# def get_url(self): -# url = reverse("%s:autocomplete" % self.admin_site.name) -# if self.fk_name in self.request.GET: -# oid = self.request.GET[self.fk_name] -# return f"{url}?oid={oid}" -# return url -# -# # -# # class UsedByRegistration(BaseAutoCompleteFilter): -# # def has_output(self): -# # return "project__exact" in self.request.GET -# # -# # def 
queryset(self, request, queryset): -# # # {'registration__exact': '30'} -# # if not self.used_parameters: -# # return queryset -# # try: -# # value = self.used_parameters["registration__exact"] -# # return queryset.filter(Q(registration__exact=value) | Q(formset__parent__registration=value)) -# # except (ValueError, ValidationError) as e: -# # # Fields may raise a ValueError or ValidationError when converting -# # # the parameters to the correct type. -# # raise IncorrectLookupParameters(e) -# # -# -# class UsedInRFormset(BaseAutoCompleteFilter): -# def has_output(self): -# return "project__exact" in self.request.GET - -# -# @register(FlexForm) -# class FlexFormAdmin(SyncMixin, ConcurrencyVersionAdmin, SmartModelAdmin): -# SYNC_COOKIE = "sync" -# inlines = [ -# FlexFormFieldInline, -# FormSetInline, -# ] -# list_display = ( -# "name", -# # "validator", -# "project", -# "is_main", -# ) -# list_filter = ( -# QueryStringFilter, -# ("project__organization", AutoCompleteFilter), -# ("project", ProjectFilter), -# ("registration", UsedByRegistration), -# ("formset", UsedInRFormset), -# ("formset__parent", UsedInRFormset), -# ) -# search_fields = ("name",) -# readonly_fields = ("version", "last_update_date") -# autocomplete_fields = ("validator", "project") -# ordering = ("name",) -# save_as = True -# -# def get_queryset(self, request): -# return ( -# super() -# .get_queryset(request) -# .prefetch_related("registration_set") -# .select_related( -# "project", -# ) -# ) -# -# def is_main(self, obj): -# return obj.registration_set.exists() -# -# is_main.boolean = True -# -# @button(html_attrs={"class": "aeb-danger"}) -# def invalidate_cache(self, request): -# from ..cache import cache -# -# cache.clear() -# -# @button(label="invalidate cache", html_attrs={"class": "aeb-warn"}) -# def invalidate_cache_single(self, request, pk): -# obj = self.get_object(request, pk) -# obj.save() -# -# @button() -# def inspect(self, request, pk): -# ctx = self.get_common_context(request, pk) -# ctx["title"] = str(ctx["original"]) -# return render(request, "admin/core/flexform/inspect.html", ctx) -# -# @button(label="editor") -# def form_editor(self, request, pk): -# self.editor = FormEditor(self, request, pk) -# if request.method == "POST": -# ret = self.editor.post(request, pk) -# self.message_user(request, "Saved", messages.SUCCESS) -# return ret -# else: -# return self.editor.get(request, pk) -# -# @view() -# def widget_attrs(self, request, pk): -# editor = FormEditor(self, request, pk) -# return editor.get_configuration() -# -# @view() -# def widget_refresh(self, request, pk): -# editor = FormEditor(self, request, pk) -# return editor.refresh() -# -# @view() -# def widget_code(self, request, pk): -# editor = FormEditor(self, request, pk) -# return editor.get_code() -# -# @view() -# def widget_display(self, request, pk): -# editor = FormEditor(self, request, pk) -# return editor.render() -# -# @button() -# def test(self, request, pk): -# ctx = self.get_common_context(request, pk) -# form_class = self.object.get_form_class() -# if request.method == "POST": -# form = form_class(request.POST, initial=self.object.get_initial()) -# if form.is_valid(): -# ctx["cleaned_data"] = form.cleaned_data -# self.message_user(request, "Form is valid") -# else: -# form = form_class(initial=self.object.get_initial()) -# ctx["form"] = form -# return render(request, "admin/core/flexform/test.html", ctx) -# -# # @view(http_basic_auth=True, permission=lambda request, obj: request.user.is_superuser) -# # def export(self, request): -# # 
try: -# # frm = SyncConfigForm(request.GET) -# # if frm.is_valid(): -# # apps = frm.cleaned_data["apps"] -# # buf = io.StringIO() -# # call_command( -# # "dumpdata", -# # *apps, -# # stdout=buf, -# # exclude=["registration.Record"], -# # use_natural_foreign_keys=True, -# # use_natural_primary_keys=True, -# # ) -# # return JsonResponse(json.loads(buf.getvalue()), safe=False) -# # else: -# # return JsonResponse(frm.errors, status=400) -# # except Exception as e: -# # logger.exception(e) -# # return JsonResponse({}, status=400) -# -# # def _get_signed_cookie(self, request, form): -# # signer = Signer(request.user.password) -# # return signer.sign_object(form.cleaned_data) -# # -# # def _get_saved_credentials(self, request): -# # try: -# # signer = Signer(request.user.password) -# # obj: dict = signer.unsign_object(request.COOKIES.get(self.SYNC_COOKIE, {})) -# # return obj -# # except BadSignature: -# # return {} -# -# # @button(label="Import") -# # def _import(self, request): -# # ctx = self.get_common_context(request, title="Import") -# # cookies = {} -# # if request.method == "POST": -# # form = SyncForm(request.POST) -# # if form.is_valid(): -# # try: -# # auth = HTTPBasicAuth(form.cleaned_data["username"], form.cleaned_data["password"]) -# # if form.cleaned_data["remember"]: -# # cookies = {self.SYNC_COOKIE: self._get_signed_cookie(request, form)} -# # else: -# # cookies = {self.SYNC_COOKIE: ""} -# # url = f"{form.cleaned_data['host']}core/flexform/export/?" -# # for app in form.cleaned_data["apps"]: -# # url += f"apps={app}&" -# # if not url.startswith("http"): -# # url = f"https://{url}" -# # -# # workdir = Path(".").absolute() -# # out = io.StringIO() -# # with requests.get(url, stream=True, auth=auth) as res: -# # if res.status_code != 200: -# # raise Exception(str(res)) -# # ctx["url"] = url -# # with tempfile.NamedTemporaryFile( -# # dir=workdir, prefix="~SYNC", suffix=".json", delete=not settings.DEBUG -# # ) as fdst: -# # fdst.write(res.content) -# # with disable_concurrency(): -# # fixture = (workdir / fdst.name).absolute() -# # call_command("loaddata", fixture, stdout=out, verbosity=3) -# # -# # message = out.getvalue() -# # self.message_user(request, message) -# # ctx["res"] = res -# # except (Exception, JSONDecodeError) as e: -# # logger.exception(e) -# # self.message_error_to_user(request, e) -# # else: -# # form = SyncForm(initial=self._get_saved_credentials(request)) -# # ctx["form"] = form -# # return render(request, "admin/core/flexform/import.html", ctx, cookies=cookies) -# -# -# @register(OptionSet) -# class OptionSetAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, SmartModelAdmin): -# list_display = ( -# "name", -# "id", -# "separator", -# "comment", -# "pk_col", -# ) -# search_fields = ("name",) -# list_filter = (("data", ValueFilter.factory(lookup_name="icontains")),) -# save_as = True -# readonly_fields = ("version", "last_update_date") -# object_history_template = "reversion-compare/object_history.html" -# -# @button() -# def display_data(self, request, pk): -# ctx = self.get_common_context(request, pk, title="Data") -# obj: OptionSet = ctx["original"] -# data = [] -# for line in obj.data.split("\r\n"): -# data.append(line.split(obj.separator)) -# ctx["data"] = data -# return render(request, "admin/core/optionset/table.html", ctx) -# -# @link(change_form=True, change_list=False, html_attrs={"target": "_new"}) -# def view_json(self, button): -# original = button.context["original"] -# if original: -# try: -# button.href = original.get_api_url() -# except 
NoReverseMatch: -# button.href = "#" -# button.label = "Error reversing url" -# -# def change_view(self, request, object_id, form_url="", extra_context=None): -# if request.method == "POST" and "_saveasnew" in request.POST: -# object_id = None -# -# return super().change_view(request, object_id) -# -# -# @register(CustomFieldType) -# class CustomFieldTypeAdmin(SmartModelAdmin): -# list_display = ( -# "name", -# "base_type", -# "attrs", -# ) -# search_fields = ("name",) -# formfield_overrides = { -# JSONField: {"widget": JSONEditor}, -# } -# -# @button() -# def test(self, request, pk): -# ctx = self.get_common_context(request, pk) -# fld = ctx["original"] -# field_type = fld.base_type -# kwargs = fld.attrs.copy() -# field = field_type(**kwargs) -# form_class_attrs = { -# "sample": field, -# } -# formClass = type(forms.Form)("TestForm", (forms.Form,), form_class_attrs) -# -# if request.method == "POST": -# form = formClass(request.POST) -# if form.is_valid(): -# self.message_user( -# request, f"Form validation success. " f"You have selected: {form.cleaned_data['sample']}" -# ) -# else: -# form = formClass() -# ctx["form"] = form -# return render(request, "admin/core/customfieldtype/test.html", ctx) diff --git a/src/aurora/core/admin/optionset.py b/src/aurora/core/admin/optionset.py index bc83d0da..26d07f25 100644 --- a/src/aurora/core/admin/optionset.py +++ b/src/aurora/core/admin/optionset.py @@ -1,10 +1,12 @@ import logging -from admin_extra_buttons.decorators import button, link -from adminfilters.value import ValueFilter from django.contrib.admin import register from django.core.cache import caches +from django.db.models.functions import Collate from django.urls import NoReverseMatch + +from admin_extra_buttons.decorators import button, link +from adminfilters.value import ValueFilter from smart_admin.modeladmin import SmartModelAdmin from ...administration.mixin import LoadDumpMixin @@ -27,20 +29,21 @@ class OptionSetAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, SmartMod "comment", "pk_col", ) - search_fields = ("name",) + search_fields = ("name_deterministic",) list_filter = (("data", ValueFilter.factory(lookup_name="icontains")),) save_as = True readonly_fields = ("version", "last_update_date") object_history_template = "reversion-compare/object_history.html" exclude = ("columns",) + def get_queryset(self, request): + return super().get_queryset(request).annotate(name_deterministic=Collate("name", "und-x-icu")) + @button() def display_data(self, request, pk): ctx = self.get_common_context(request, pk, title="Data") obj: OptionSet = ctx["original"] - data = [] - for line in obj.data.split("\r\n"): - data.append(line.split(obj.separator)) + data = [line.split(obj.separator) for line in obj.data.split("\r\n")] ctx["data"] = data return render(request, "admin/core/optionset/table.html", ctx) diff --git a/src/aurora/core/admin/organization.py b/src/aurora/core/admin/organization.py index 05b7cdd8..c282f7a1 100644 --- a/src/aurora/core/admin/organization.py +++ b/src/aurora/core/admin/organization.py @@ -2,6 +2,9 @@ from django.contrib.admin import register from django.core.cache import caches +from django.db.models.functions import Collate + +from adminfilters.mixin import AdminAutoCompleteSearchMixin from mptt.admin import MPTTModelAdmin from smart_admin.mixins import LinkedObjectsMixin @@ -15,14 +18,23 @@ @register(Organization) -class OrganizationAdmin(SyncMixin, LinkedObjectsMixin, MPTTModelAdmin): +class OrganizationAdmin(SyncMixin, AdminAutoCompleteSearchMixin, 
LinkedObjectsMixin, MPTTModelAdmin): list_display = ("name",) mptt_level_indent = 20 mptt_indent_field = "name" - search_fields = ("name",) + search_fields = ("name_deterministic",) protocol_class = AuroraSyncOrganizationProtocol change_list_template = "admin/core/organization/change_list.html" + def get_queryset(self, request): + return ( + super() + .get_queryset(request) + .annotate( + name_deterministic=Collate("name", "und-x-icu"), + ) + ) + def admin_sync_show_inspect(self): return True diff --git a/src/aurora/core/admin/project.py b/src/aurora/core/admin/project.py index 365a44df..d0c2efe7 100644 --- a/src/aurora/core/admin/project.py +++ b/src/aurora/core/admin/project.py @@ -2,6 +2,9 @@ from django.contrib.admin import register from django.core.cache import caches +from django.db.models.functions import Collate + +from adminfilters.mixin import AdminAutoCompleteSearchMixin from mptt.admin import MPTTModelAdmin from smart_admin.mixins import LinkedObjectsMixin @@ -15,20 +18,24 @@ @register(Project) -class ProjectAdmin(SyncMixin, LinkedObjectsMixin, MPTTModelAdmin): - list_display = ("name",) +class ProjectAdmin(SyncMixin, AdminAutoCompleteSearchMixin, LinkedObjectsMixin, MPTTModelAdmin): + list_display = ("name", "organization") list_filter = ("organization",) mptt_level_indent = 20 mptt_indent_field = "name" - search_fields = ("name",) + search_fields = ("name_deterministic",) protocol_class = AuroraSyncProjectProtocol autocomplete_fields = "parent, " - def get_search_results(self, request, queryset, search_term): - queryset, may_have_duplicates = super().get_search_results(request, queryset, search_term) - if "oid" in request.GET: - queryset = queryset.filter(organization__id=request.GET["oid"]) - return queryset, may_have_duplicates + def get_queryset(self, request): + return ( + super() + .get_queryset(request) + .annotate( + name_deterministic=Collate("name", "und-x-icu"), + ) + .select_related("organization") + ) def get_readonly_fields(self, request, obj=None): ro = super().get_readonly_fields(request, obj) diff --git a/src/aurora/core/admin/protocols.py b/src/aurora/core/admin/protocols.py index 09d24c92..26b6514d 100644 --- a/src/aurora/core/admin/protocols.py +++ b/src/aurora/core/admin/protocols.py @@ -31,8 +31,7 @@ def get_related_for_field(self, obj, field): if obj not in self._visited: return [obj.parent] return [] - else: - return super().get_related_for_field(obj, field) + return super().get_related_for_field(obj, field) class AuroraSyncOrganizationProtocol(LoadDumpProtocol): diff --git a/src/aurora/core/admin/validator.py b/src/aurora/core/admin/validator.py index c976c65c..7f32dcdc 100644 --- a/src/aurora/core/admin/validator.py +++ b/src/aurora/core/admin/validator.py @@ -1,10 +1,11 @@ import json import logging -from admin_extra_buttons.decorators import button from django import forms from django.contrib.admin import register from django.core.cache import caches + +from admin_extra_buttons.decorators import button from smart_admin.modeladmin import SmartModelAdmin from ...administration.mixin import LoadDumpMixin @@ -40,9 +41,13 @@ class ValidatorAdmin(LoadDumpMixin, SyncMixin, ConcurrencyVersionAdmin, SmartMod Validator.FIELD: "", # field value Validator.SCRIPT: "", # field value Validator.MODULE: [{}], - Validator.FORMSET: {"total_form_count": 2, "errors": {}, "non_form_errors": {}, "cleaned_data": []}, + Validator.FORMSET: { + "total_form_count": 2, + "errors": {}, + "non_form_errors": {}, + "cleaned_data": [], + }, } - # change_list_template = 
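For context on the Collate annotations introduced in the admin changes above: once the name columns move to a non-deterministic, case-insensitive collation (see the migrations later in this changeset), LIKE/icontains lookups on them fail, so each admin searches a deterministic alias instead. A minimal standalone sketch of the pattern, assuming a hypothetical Example model with a CharField "name" (Collate and the "und-x-icu" collation are the ones used in the diff; everything else is illustrative):

from django.contrib import admin
from django.db.models.functions import Collate

from .models import Example  # hypothetical model with a CharField "name"


@admin.register(Example)
class ExampleAdmin(admin.ModelAdmin):
    # search against the deterministic annotation, not the raw column
    search_fields = ("name_deterministic",)

    def get_queryset(self, request):
        # expose a deterministic alias of "name" so icontains/LIKE works again
        return super().get_queryset(request).annotate(
            name_deterministic=Collate("name", "und-x-icu")
        )

This is why search_fields in the admins above reads "name_deterministic" rather than "name".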
"reversion/change_list.html" object_history_template = "reversion-compare/object_history.html" change_form_template = None inlines = [] @@ -54,14 +59,15 @@ def save_model(self, request, obj, form, change): def used_by(self, obj): if obj.target == Validator.FORM: return ", ".join(obj.flexform_set.values_list("name", flat=True)) - elif obj.target == Validator.FIELD: + if obj.target == Validator.FIELD: return ", ".join(obj.flexformfield_set.values_list("name", flat=True)) - elif obj.target == Validator.FORMSET: + if obj.target == Validator.FORMSET: return ", ".join(obj.formset_set.values_list("name", flat=True)) - elif obj.target == Validator.MODULE: + if obj.target == Validator.MODULE: return ", ".join(obj.validator_for.values_list("name", flat=True)) - elif obj.target == Validator.SCRIPT: + if obj.target == Validator.SCRIPT: return ", ".join(obj.script_for.values_list("name", flat=True)) + return None @button() def test(self, request, pk): @@ -80,7 +86,6 @@ def test(self, request, pk): if form.is_valid(): self.object.code = form.cleaned_data["code"] self.object.save() - # return HttpResponseRedirect("..") else: form = ValidatorTestForm( initial={"code": self.object.code, "input": original.jspickle(param)}, diff --git a/src/aurora/core/admin_sync.py b/src/aurora/core/admin_sync.py index 17a12ecf..a2a47862 100644 --- a/src/aurora/core/admin_sync.py +++ b/src/aurora/core/admin_sync.py @@ -1,16 +1,16 @@ import json from datetime import datetime -from typing import Dict -from admin_extra_buttons.decorators import button, view -from admin_sync.mixin import SyncMixin as SyncMixin_ -from admin_sync.perms import check_publish_permission, check_sync_permission -from admin_sync.utils import SyncResponse, is_local, is_remote, wraps from django.contrib import messages from django.contrib.admin import action from django.shortcuts import render from django.views.decorators.csrf import csrf_exempt +from admin_extra_buttons.decorators import button, view +from admin_sync.mixin import SyncMixin as SyncMixin_ +from admin_sync.perms import check_publish_permission, check_sync_permission +from admin_sync.utils import SyncResponse, is_local, is_remote, wraps + class SyncMixin(SyncMixin_): actions = ["publish_action"] @@ -25,7 +25,10 @@ class SyncMixin(SyncMixin_): def get_version(self, request, key): obj = self.model.objects.get_by_natural_key(*key.split("|")) return SyncResponse( - {"version": obj.version, "last_update_date": obj.last_update_date.strftime(self.UPDATE_FORMAT)} + { + "version": obj.version, + "last_update_date": obj.last_update_date.strftime(self.UPDATE_FORMAT), + } ) def get_remote_version(self, request, pk): @@ -41,7 +44,9 @@ def check_remote_version(self, request, pk): local_date = datetime.strptime(obj.last_update_date.strftime(self.UPDATE_FORMAT), self.UPDATE_FORMAT) if remote_date > local_date: self.message_user( - request, f"Remote Record is most recent than the local one: {remote_date}", messages.WARNING + request, + f"Remote Record is most recent than the local one: {remote_date}", + messages.WARNING, ) else: self.message_user( @@ -52,11 +57,11 @@ def check_remote_version(self, request, pk): @button(visible=is_local, order=999, permission=check_publish_permission) def publish(self, request, pk): obj = self.get_object(request, pk) - i: Dict = self.get_remote_version(request, obj) + i: dict = self.get_remote_version(request, obj) if i["version"] == obj.version: return super().publish.func(self, request, pk) - else: - self.message_user(request, "Version mismatch. 
Fetch before publish", messages.ERROR) + self.message_user(request, "Version mismatch. Fetch before publish", messages.ERROR) + return None @button( visible=lambda b: b.model_admin.admin_sync_show_inspect(), @@ -68,7 +73,6 @@ def admin_sync_inspect_multi(self, request): data = collector.collect(self.get_queryset(request)) context["data"] = data return render(request, "admin/admin_sync/inspect.html", context) - # return JsonResponse(c.models, safe=False) @action(description="Publish") def publish_action(self, request, queryset): diff --git a/src/aurora/core/authentication.py b/src/aurora/core/authentication.py index c0090210..9d6dcf37 100644 --- a/src/aurora/core/authentication.py +++ b/src/aurora/core/authentication.py @@ -1,6 +1,7 @@ import logging from django.contrib.auth import get_user_model + from social_core.exceptions import InvalidEmail from social_core.pipeline import social_auth from social_core.pipeline import user as social_core_user @@ -41,7 +42,7 @@ def user_details(strategy, details, backend, user=None, *args, **kwargs): def require_email(strategy, details, user=None, is_new=False, *args, **kwargs): if user and user.email: return - elif is_new and not details.get("email"): + if is_new and not details.get("email"): logger.error("Email couldn't be validated") raise InvalidEmail(strategy) diff --git a/src/aurora/core/backends.py b/src/aurora/core/backends.py index 4bfe6c72..d28994b4 100644 --- a/src/aurora/core/backends.py +++ b/src/aurora/core/backends.py @@ -6,16 +6,26 @@ User = get_user_model() -class AnyUserAuthBackend(ModelBackend): +class AnyUserAuthBackend(ModelBackend): # pragma: no cover + # Develop only backend def authenticate(self, request, username=None, password=None, **kwargs): host = request.get_host() - if settings.DEBUG and (host.startswith("localhost") or host.startswith("127.0.0.1")): + if settings.DEBUG and (host.startswith(("localhost", "127.0.0.1"))): try: - user, __ = User.objects.update_or_create( - username=username, is_staff=True, is_active=True, is_superuser=True, email=f"{username}@demo.org" - ) - user.set_password(password) - user.save() - return user + if username.startswith("admin"): + values = {"is_staff": True, "is_active": True, "is_superuser": True} + elif username.startswith("user"): + values = {"is_staff": False, "is_active": True, "is_superuser": False} + else: + values = {} + if values: + user, __ = User.objects.update_or_create( + username=username, + defaults={"email": f"{username}@demo.org", **values}, + ) + user.set_password(password) + user.save() + return user except (User.DoesNotExist, IntegrityError): pass + return None diff --git a/src/aurora/core/channels.py b/src/aurora/core/channels.py deleted file mode 100644 index 9ab1a72f..00000000 --- a/src/aurora/core/channels.py +++ /dev/null @@ -1,45 +0,0 @@ -import json -from collections import defaultdict -from urllib import parse - -from channels.generic.websocket import WebsocketConsumer -from django.core.cache import caches -from django.urls import path - -cache = caches["default"] - - -class FieldEditorConsumer(WebsocketConsumer): - def connect(self): - self.accept() - - def disconnect(self, close_code): - pass - - def receive(self, text_data=None, bytes_data=None): - data = dict(parse.parse_qsl(text_data)) - data.pop("csrfmiddlewaretoken") - config = defaultdict(dict) - for name, value in data.items(): - prefix, field_name = name.split("-") - config[prefix][field_name] = value - cache.set(self.scope["path"], config) - self.send(text_data=json.dumps({"message": data})) - - -class 
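The reworked AnyUserAuthBackend above only ever creates or updates users when settings.DEBUG is true and the request host is local. As a hedged sketch of how such a development-only backend is typically wired in (the settings keys are standard Django; whether the project enables it conditionally is an assumption, only the dotted path to the class shown above is taken from the diff):

# settings.py (sketch, development only)
DEBUG = True  # the backend above is a no-op unless DEBUG is enabled

AUTHENTICATION_BACKENDS = [
    # dotted path to the backend shown in this diff
    "aurora.core.backends.AnyUserAuthBackend",
    # keep the default backend so normal credentials still work
    "django.contrib.auth.backends.ModelBackend",
]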
FieldWidgetConsumer(WebsocketConsumer): - def connect(self): - self.accept() - - def disconnect(self, close_code): - pass - - def receive(self, text_data=None, bytes_data=None): - data = parse.parse_qs(text_data) - self.send(text_data=json.dumps({"message": data})) - - -websocket_urlpatterns = [ - path("editor/field///", FieldEditorConsumer.as_asgi()), - path("widget/field///", FieldWidgetConsumer.as_asgi()), -] diff --git a/src/aurora/core/compat.py b/src/aurora/core/compat.py index 14a8a79d..31ded88f 100644 --- a/src/aurora/core/compat.py +++ b/src/aurora/core/compat.py @@ -1,4 +1,5 @@ from django import forms + from django_regex.fields import RegexField as RegexField_ from django_regex.utils import Regex from strategy_field.fields import StrategyClassField as StrategyClassField_ diff --git a/src/aurora/core/crypto.py b/src/aurora/core/crypto.py index a3304efa..561acdbf 100644 --- a/src/aurora/core/crypto.py +++ b/src/aurora/core/crypto.py @@ -2,14 +2,14 @@ import io import json import logging -from typing import Union + +from django.conf import settings from Crypto.Cipher import AES, PKCS1_OAEP from Crypto.Hash import SHA256 from Crypto.PublicKey import RSA from Crypto.Util.Padding import unpad from cryptography.fernet import Fernet -from django.conf import settings from aurora.core.utils import safe_json @@ -35,8 +35,7 @@ def encrypt(self, v): cipher_suite = Fernet(self.key) # key should be byte encrypted_text = cipher_suite.encrypt(value.encode("ascii")) - encrypted_text = base64.urlsafe_b64encode(encrypted_text).decode("ascii") - return encrypted_text + return base64.urlsafe_b64encode(encrypted_text).decode("ascii") except Exception as e: logger.exception(e) return value @@ -45,8 +44,7 @@ def decrypt(self, value): try: txt = base64.urlsafe_b64decode(value) cipher_suite = Fernet(self.key) - decoded_text = cipher_suite.decrypt(txt).decode("ascii") - return decoded_text + return cipher_suite.decrypt(txt).decode("ascii") except Exception as e: logger.exception(e) return value @@ -61,8 +59,8 @@ def __init__(self, public_pem: str = None, private_pem: str = None): private_pem = key.export_key().decode() public_pem = key.publickey().export_key().decode() - assert isinstance(public_pem, str) - assert isinstance(private_pem, str) + if not isinstance(public_pem, str) or not isinstance(private_pem, str): + raise BaseException("Public and private key need to be string") self.public_pem = public_pem.encode() self.private_pem = private_pem.encode() @@ -75,7 +73,6 @@ def decrypt(self, data): def get_public_keys(pem): public_key = PKCS1_OAEP.new(RSA.import_key(pem)) - # symmetric_key = get_random_bytes(BLOCK_SIZE * 2) symmetric_key = b"12345678901234567890123456789012" enc_symmetric_key = public_key.encrypt(symmetric_key) return symmetric_key, enc_symmetric_key @@ -88,10 +85,10 @@ def crypt(data: str, public_pem: bytes) -> bytes: symmetric_key, enc_symmetric_key = get_public_keys(public_pem) file_out.write(enc_symmetric_key) while True: - dataChunk = file_in.read(CIPHERTXT_SIZE) - if dataChunk: + data_chunk = file_in.read(CIPHERTXT_SIZE) + if data_chunk: cipher = AES.new(symmetric_key, AES.MODE_GCM) - file_out.write(cipher.nonce + b"".join(reversed(cipher.encrypt_and_digest(dataChunk)))) + file_out.write(cipher.nonce + b"".join(reversed(cipher.encrypt_and_digest(data_chunk)))) else: break file_out.seek(0) @@ -108,16 +105,16 @@ def decrypt(data: bytes, private_pem: bytes): symmetric_key = private_key.decrypt(file_in.read(enc_key_size)) nonce = file_in.read(NONCE_SIZE) while nonce: - ciphertxtTag = 
file_in.read(CHUNK_SIZE) + ciphertxt_tag = file_in.read(CHUNK_SIZE) cipher = AES.new(symmetric_key, AES.MODE_GCM, nonce) - file_out.write(cipher.decrypt_and_verify(ciphertxtTag[BLOCK_SIZE:], ciphertxtTag[:BLOCK_SIZE])) + file_out.write(cipher.decrypt_and_verify(ciphertxt_tag[BLOCK_SIZE:], ciphertxt_tag[:BLOCK_SIZE])) nonce = file_in.read(NONCE_SIZE) file_out.seek(0) return file_out.read().decode() -def decrypt_offline(data: str, private_pem: bytes) -> Union[str, bytes]: +def decrypt_offline(data: str, private_pem: bytes) -> str | bytes: encrypted_symmetric_key = data[:344] form_fields = data[344:] @@ -129,5 +126,4 @@ def decrypt_offline(data: str, private_pem: bytes) -> Union[str, bytes]: derived_key = base64.b64decode("LefjQ2pEXmiy/nNZvEJ43i8hJuaAnzbA1Cbn1hOuAgA=") cipher = AES.new(derived_key, AES.MODE_CBC, decrypted_symmetric_key.encode("utf-8")) - decrypted_data = unpad(cipher.decrypt(enc), 16) - return decrypted_data + return unpad(cipher.decrypt(enc), 16) diff --git a/src/aurora/core/fields/__init__.py b/src/aurora/core/fields/__init__.py index d3cba319..6aab894b 100644 --- a/src/aurora/core/fields/__init__.py +++ b/src/aurora/core/fields/__init__.py @@ -1,23 +1,27 @@ from django import forms -from . import widgets -from .captcha import CaptchaField -from .compilation_time import CompilationTimeField -from .document import DocumentField -from .file import SmartFileField -from .gis import LocationField -from .hidden import HiddenField -from .label import LabelOnlyField -from .mixins import SmartFieldMixin -from .multi_checkbox import MultiCheckboxField -from .radio import RadioField, YesNoChoice, YesNoRadio -from .remote_ip import RemoteIpField -from .select import AjaxSelectField, SelectField, SmartSelectWidget -from .webcam import WebcamField +from . 
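The crypto.py changes above tidy two layers: the Crypto helper, which wraps symmetric Fernet encryption and base64-encodes the token, and the hybrid crypt/decrypt pair, which combines an RSA-encrypted session key with AES-GCM chunks. For the symmetric half only, a minimal Fernet round-trip as a reference (a standalone sketch using the cryptography package; key handling here is illustrative, in the project the key comes from settings):

from cryptography.fernet import Fernet

key = Fernet.generate_key()          # illustrative; not how the project derives its key
cipher_suite = Fernet(key)

token = cipher_suite.encrypt(b"some value")        # bytes in, URL-safe token out
assert cipher_suite.decrypt(token) == b"some value"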
import widgets # noqa +from .captcha import CaptchaField # noqa +from .compilation_time import CompilationTimeField # noqa +from .document import DocumentField # noqa +from .file import SmartFileField # noqa +from .gis import LocationField # noqa +from .hidden import HiddenField # noqa +from .label import LabelOnlyField # noqa +from .mixins import SmartFieldMixin # noqa +from .multi_checkbox import MultiCheckboxField # noqa +from .radio import RadioField, YesNoChoice, YesNoRadio # noqa +from .remote_ip import RemoteIpField # noqa +from .selected import AjaxSelectField, SelectField, SmartSelectWidget # noqa +from .webcam import WebcamField # noqa WIDGET_FOR_FORMFIELD_DEFAULTS = { forms.DateField: {"widget": widgets.SmartDateWidget}, - forms.CharField: {"widget": widgets.SmartTextWidget, "max_length": 200, "strip": True}, + forms.CharField: { + "widget": widgets.SmartTextWidget, + "max_length": 200, + "strip": True, + }, forms.IntegerField: {"widget": widgets.NumberWidget}, forms.FloatField: {"widget": widgets.NumberWidget}, forms.ChoiceField: {"widget": SmartSelectWidget}, diff --git a/src/aurora/core/fields/captcha.py b/src/aurora/core/fields/captcha.py index d58e8623..f8744443 100644 --- a/src/aurora/core/fields/captcha.py +++ b/src/aurora/core/fields/captcha.py @@ -1,4 +1,4 @@ -import random +import secrets from django import forms @@ -10,24 +10,12 @@ def get_image_for_number(n: int): - return f"{n}{random.choice(TYPES)}{random.choice(ORIENTATION)}.jpg" + return f"{n}{secrets.choice(TYPES)}{secrets.choice(ORIENTATION)}.jpg" def get_random_numbers(): - return random.randrange(100), random.randrange(100) + return secrets.randbelow(100), secrets.randbelow(100) class CaptchaField(forms.CharField): widget = CaptchaWidget - # - # def __init__(self, **kwargs): - # # kwargs["required"] = True - # # kwargs["label"] = "" - # # kwargs["help_text"] = "" - # super().__init__(**kwargs) - # - # def widget_attrs(self, widget): - # attrs = super().widget_attrs(widget) - # op = get_random_numbers() - # attrs['data-numbers'] = f"{op[0]}-{op[1]}" - # return attrs diff --git a/src/aurora/core/fields/compilation_time.py b/src/aurora/core/fields/compilation_time.py index 2e5eb72d..851bbd9d 100644 --- a/src/aurora/core/fields/compilation_time.py +++ b/src/aurora/core/fields/compilation_time.py @@ -59,7 +59,7 @@ def __init__(self, **kwargs): super().__init__(**kwargs) def to_python(self, value): - return dict(zip(["start", "elapsed", "rounds", "total"], value)) + return dict(zip(["start", "elapsed", "rounds", "total"], value, strict=True)) def widget_attrs(self, widget): attrs = super().widget_attrs(widget) diff --git a/src/aurora/core/fields/file.py b/src/aurora/core/fields/file.py index 2c9ebd8d..a4d7cf80 100644 --- a/src/aurora/core/fields/file.py +++ b/src/aurora/core/fields/file.py @@ -1,7 +1,7 @@ from django import forms from django.conf import settings -from aurora.i18n.gettext import gettext as _ +from aurora.i18n.get_text import gettext as _ class UploadFileWidget(forms.ClearableFileInput): @@ -42,9 +42,5 @@ class SmartFileField(forms.FileField): "invalid": _("No file was submitted. 
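Several hunks in this changeset (compilation_time.py above, forms.py further down) add strict=True to zip(). That is the Python 3.10+ guard that raises instead of silently truncating when the iterables differ in length; a quick illustration mirroring the compilation-time field:

names = ["start", "elapsed", "rounds", "total"]
values = [0, 1, 2, 3]
dict(zip(names, values, strict=True))      # OK: same length

try:
    dict(zip(names, values[:3], strict=True))
except ValueError:
    pass  # a length mismatch now raises instead of dropping "total"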
Check the encoding type on the form."), "missing": _("No file was submitted."), "empty": _("The submitted file is empty."), - # 'max_length': ngettext_lazy( - # 'Ensure this filename has at most %(max)d character (it has %(length)d).', - # 'Ensure this filename has at most %(max)d characters (it has %(length)d).', - # 'max'), "contradiction": _("Please either submit a file or check the clear checkbox, not both."), } diff --git a/src/aurora/core/fields/gis.py b/src/aurora/core/fields/gis.py index 8586fdeb..c2207918 100644 --- a/src/aurora/core/fields/gis.py +++ b/src/aurora/core/fields/gis.py @@ -29,9 +29,14 @@ class LocationField(forms.CharField): def __init__(self, *, max_length=None, min_length=None, strip=True, empty_value="", **kwargs): kwargs["label"] = "" kwargs["help_text"] = "" - super().__init__(max_length=None, min_length=None, strip=strip, empty_value=empty_value, **kwargs) + super().__init__( + max_length=None, + min_length=None, + strip=strip, + empty_value=empty_value, + **kwargs, + ) def to_python(self, value): decoded = base64.decodebytes(value.encode()) - # as_json = json.loads(decoded) return json.loads(decoded) diff --git a/src/aurora/core/fields/mixins.py b/src/aurora/core/fields/mixins.py index e2efe200..ea3defcd 100644 --- a/src/aurora/core/fields/mixins.py +++ b/src/aurora/core/fields/mixins.py @@ -62,23 +62,20 @@ def widget_attrs(self, widget): if not self.flex_field.required: attrs.pop("required", "") - # attrs["flex_field"] = self.flex_field attrs["data-flex-name"] = self.flex_field.name for attr in [ "onblur", "onchange", "onkeyup", ]: - if attr in self.smart_events: - if self.smart_events[attr]: - attrs[attr] = oneline(self.smart_events[attr]) + if attr in self.smart_events and self.smart_events[attr]: + attrs[attr] = oneline(self.smart_events[attr]) for attr in [ "onload", ]: - if attr in self.smart_events: - if self.smart_events[attr]: - attrs[f"data-{attr}"] = oneline(self.smart_events[attr]) + if attr in self.smart_events and self.smart_events[attr]: + attrs[f"data-{attr}"] = oneline(self.smart_events[attr]) if validation := self.smart_events.get("validation", None): attrs["data-validation"] = oneline(validation) diff --git a/src/aurora/core/fields/radio.py b/src/aurora/core/fields/radio.py index 85c30561..286501c5 100644 --- a/src/aurora/core/fields/radio.py +++ b/src/aurora/core/fields/radio.py @@ -15,7 +15,7 @@ def __init__(self, *, choices=(), **kwargs): if len(choices) != 2: raise ValueError("YesNo accept only 2 choice label") for el in choices: - if not isinstance(el, (list, tuple)) and el[0] not in ["y", "n"]: + if not isinstance(el, list | tuple) and el[0] not in ["y", "n"]: raise ValueError(f"Choice value must be 'y' or 'n' not '{el[0]}' ") super().__init__(choices=choices, **kwargs) diff --git a/src/aurora/core/fields/select.py b/src/aurora/core/fields/selected.py similarity index 98% rename from src/aurora/core/fields/select.py rename to src/aurora/core/fields/selected.py index e5a84293..81c0fe2d 100644 --- a/src/aurora/core/fields/select.py +++ b/src/aurora/core/fields/selected.py @@ -6,8 +6,8 @@ from django.urls import NoReverseMatch, reverse from django.utils.translation import get_language -from .widgets.mixins import TailWindMixin from ..version_media import VersionMedia +from .widgets.mixins import TailWindMixin logger = logging.getLogger(__name__) @@ -26,7 +26,6 @@ class AjaxSelectWidget(TailWindMixin, forms.Select): def __init__(self, attrs=None): super().__init__(attrs=attrs) self.attrs.setdefault("class", {}) - # self.attrs["class"] += " 
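The SmartFieldMixin.widget_attrs change above collapses the nested ifs that copy configured JS handlers onto the widget as HTML attributes. The underlying Django hook is Field.widget_attrs(); a reduced sketch of the idea, assuming a hypothetical field that carries a smart_events dict (this is not the mixin's full behaviour):

from django import forms


class EventAwareField(forms.CharField):
    """Illustrative field that forwards configured JS handlers to its widget."""

    def __init__(self, smart_events=None, **kwargs):
        self.smart_events = smart_events or {}
        super().__init__(**kwargs)

    def widget_attrs(self, widget):
        attrs = super().widget_attrs(widget)
        for attr in ("onblur", "onchange", "onkeyup"):
            if self.smart_events.get(attr):
                attrs[attr] = self.smart_events[attr]
        if self.smart_events.get("onload"):
            attrs["data-onload"] = self.smart_events["onload"]
        return attrs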
ajaxSelect" def build_attrs(self, base_attrs, extra_attrs=None): base_attrs["class"] += " ajaxSelect" diff --git a/src/aurora/core/fields/webcam.py b/src/aurora/core/fields/webcam.py index 3703271b..f67d5193 100644 --- a/src/aurora/core/fields/webcam.py +++ b/src/aurora/core/fields/webcam.py @@ -25,8 +25,7 @@ def __init__(self, attrs=None): super().__init__(attrs=attrs) def build_attrs(self, base_attrs, extra_attrs=None): - attrs = super().build_attrs(base_attrs, extra_attrs) - return attrs + return super().build_attrs(base_attrs, extra_attrs) def render(self, name, value, attrs=None, renderer=None): """Render the widget as an HTML string.""" diff --git a/src/aurora/core/fields/widgets/__init__.py b/src/aurora/core/fields/widgets/__init__.py index 7e3e7a52..bd804779 100644 --- a/src/aurora/core/fields/widgets/__init__.py +++ b/src/aurora/core/fields/widgets/__init__.py @@ -1,8 +1,7 @@ -from .datetime import SmartDateWidget -from .editor import JavascriptEditor -from .image import ImageWidget +from .date_time import SmartDateWidget # noqa +from .editor import JavascriptEditor # noqa +from .image import ImageWidget # noqa -# from .multi_checkbox import MultiCheckboxWidget -from .number import NumberWidget -from .radio import RadioWidget, YesNoRadioWidget -from .text import SmartTextWidget +from .number import NumberWidget # noqa +from .radio import RadioWidget, YesNoRadioWidget # noqa +from .text import SmartTextWidget # noqa diff --git a/src/aurora/core/fields/widgets/datetime.py b/src/aurora/core/fields/widgets/date_time.py similarity index 66% rename from src/aurora/core/fields/widgets/datetime.py rename to src/aurora/core/fields/widgets/date_time.py index 4ba10a1e..47dc6a74 100644 --- a/src/aurora/core/fields/widgets/datetime.py +++ b/src/aurora/core/fields/widgets/date_time.py @@ -1,20 +1,12 @@ from django import forms from django.conf import settings -from .mixins import TailWindMixin from ...version_media import VersionMedia +from .mixins import TailWindMixin class SmartDateWidget(TailWindMixin, forms.DateInput): - # class Media: - # js = [ - # get_versioned_static_name("datetimepicker/datepicker"s.js"), - # # "https://cdnjs.cloudflare.com/ajax/libs/jquery-datetimepicker/2.5.20/jquery.datetimepicker.full.min.js", - # get_versioned_static_name("datetimepicker/dt.js"), - # ] - # css = {"all": [get_versioned_static_name("datetimepicker/datepicker.css")]} - - def __init__(self, attrs=None, format=None): + def __init__(self, attrs=None, format=None): # noqa super().__init__(attrs=attrs, format=format) self.attrs.setdefault("class", {}) self.attrs["class"] += " vDateField " diff --git a/src/aurora/core/fields/widgets/image.py b/src/aurora/core/fields/widgets/image.py index c3ae1922..1e420fa0 100644 --- a/src/aurora/core/fields/widgets/image.py +++ b/src/aurora/core/fields/widgets/image.py @@ -1,6 +1,6 @@ from django import forms -from aurora.i18n.gettext import gettext as _ +from aurora.i18n.get_text import gettext as _ class ImageWidget(forms.ClearableFileInput): diff --git a/src/aurora/core/fields/widgets/picture.py b/src/aurora/core/fields/widgets/picture.py index 201c4453..8b9f3441 100644 --- a/src/aurora/core/fields/widgets/picture.py +++ b/src/aurora/core/fields/widgets/picture.py @@ -17,8 +17,7 @@ def __init__(self, attrs=None): super().__init__(attrs=attrs) def build_attrs(self, base_attrs, extra_attrs=None): - attrs = super().build_attrs(base_attrs, extra_attrs) - return attrs + return super().build_attrs(base_attrs, extra_attrs) def render(self, name, value, attrs=None, 
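AjaxSelectWidget above now appends its "ajaxSelect" class inside build_attrs() instead of mutating self.attrs in __init__. As a standalone reminder of that hook, a slightly more defensive variant (the widget name is hypothetical; build_attrs is the standard Django Widget API):

from django import forms


class TaggedSelect(forms.Select):
    def build_attrs(self, base_attrs, extra_attrs=None):
        attrs = super().build_attrs(base_attrs, extra_attrs)
        # append to, rather than overwrite, any classes already configured
        attrs["class"] = (attrs.get("class", "") + " ajaxSelect").strip()
        return attrs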
renderer=None): """Render the widget as an HTML string.""" diff --git a/src/aurora/core/flags.py b/src/aurora/core/flags.py index 8a9b8958..29ef805a 100644 --- a/src/aurora/core/flags.py +++ b/src/aurora/core/flags.py @@ -1,6 +1,7 @@ -from adminactions.utils import get_attr from django.conf import settings from django.core.exceptions import ValidationError + +from adminactions.utils import get_attr from flags import conditions from flags.conditions import validate_parameter @@ -47,11 +48,11 @@ def request_header(param_name, request=None, **kwargs): except ValueError: param = param_name value = "" - key = f'HTTP_{param.replace("-", "_")}' + key = f"HTTP_{param.replace('-', '_')}" if value: enabled = request.META.get(key, None) == value else: - enabled = key in request.META.keys() + enabled = key in request.META return enabled diff --git a/src/aurora/core/forms.py b/src/aurora/core/forms.py index 9b86f200..b65a19de 100644 --- a/src/aurora/core/forms.py +++ b/src/aurora/core/forms.py @@ -1,6 +1,5 @@ import csv -from adminactions.api import delimiters, quotes from django import forms from django.conf import settings from django.core.exceptions import ValidationError @@ -8,6 +7,8 @@ from django.utils import formats from django.utils.translation import gettext as _ +from adminactions.api import delimiters, quotes + from .fields.widgets import JavascriptEditor from .version_media import VersionMedia @@ -157,13 +158,13 @@ class CSVOptionsForm(forms.Form): delimiter = forms.ChoiceField( label=_("Delimiter"), required=False, - choices=list(zip(delimiters, delimiters)), + choices=list(zip(delimiters, delimiters, strict=True)), help_text=_("A one-character string used to separate fields"), ) quotechar = forms.ChoiceField( label=_("Quotechar"), required=False, - choices=list(zip(quotes, quotes)), + choices=list(zip(quotes, quotes, strict=True)), help_text=_( "A one-character string used to quote fields containing special characters, " "such as the delimiter or quotechar, or which contain new-line characters" diff --git a/src/aurora/core/handlers.py b/src/aurora/core/handlers.py index b9774372..e2350605 100644 --- a/src/aurora/core/handlers.py +++ b/src/aurora/core/handlers.py @@ -15,13 +15,9 @@ def update_cache(sender, instance, **kwargs): for r in instance.registration_set.all(): r.save() - # elif isinstance(instance, FormSet): - # instance.flex_form.save() elif isinstance(instance, FlexFormField): instance.flex_form.save() elif isinstance(instance, FlexForm): - # for r in instance.formset_set.all(): - # r.parent.save() for r in instance.registration_set.all(): r.save() diff --git a/src/aurora/core/migrations/0001_initial.py b/src/aurora/core/migrations/0001_initial.py index ec2a9fe9..a680be1b 100644 --- a/src/aurora/core/migrations/0001_initial.py +++ b/src/aurora/core/migrations/0001_initial.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [] operations = [CITextExtension()] diff --git a/src/aurora/core/migrations/0002_initial.py b/src/aurora/core/migrations/0002_initial.py index d8949028..c82018ab 100644 --- a/src/aurora/core/migrations/0002_initial.py +++ b/src/aurora/core/migrations/0002_initial.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -19,8 +18,19 @@ class Migration(migrations.Migration): migrations.CreateModel( name="CustomFieldType", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", 
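The request_header flag condition in flags.py above resolves the header through request.META, where Django exposes HTTP headers as HTTP_<NAME> with dashes turned into underscores. A tiny illustration of how a parameter such as "X-BETA=1" would be matched (the request is built with Django's RequestFactory; the parameter value is hypothetical):

from django.test import RequestFactory

request = RequestFactory().get("/", HTTP_X_BETA="1")

param = "X-BETA=1"
name, value = param.split("=")
key = f"HTTP_{name.replace('-', '_')}"   # -> "HTTP_X_BETA"

# with a value: compare it; without one: only require the header to be present
enabled = request.META.get(key) == value if value else key in request.META
assert enabled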
django.contrib.postgres.fields.citext.CICharField(max_length=100, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=100, unique=True), + ), ("attrs", models.JSONField(default=dict)), ("regex", django_regex.fields.RegexField(blank=True, null=True)), ("clean", models.TextField(blank=True, null=True)), @@ -29,8 +39,19 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FlexForm", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True), + ), ], options={ "verbose_name": "FlexForm", @@ -40,8 +61,19 @@ class Migration(migrations.Migration): migrations.CreateModel( name="OptionSet", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=100)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=100), + ), ("data", models.TextField(blank=True, null=True)), ("separator", models.CharField(blank=True, default="", max_length=1)), ], @@ -49,25 +81,55 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Validator", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True), + ), ("message", models.CharField(max_length=255)), ("code", models.TextField(blank=True, null=True)), - ("target", models.CharField(choices=[("form", "Form"), ("field", "Field")], max_length=5)), + ( + "target", + models.CharField(choices=[("form", "Form"), ("field", "Field")], max_length=5), + ), ], ), migrations.CreateModel( name="FormSet", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True), + ), ("extra", models.IntegerField(default=0)), ("dynamic", models.BooleanField(default=True)), - ("flex_form", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.flexform")), + ( + "flex_form", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.flexform"), + ), ( "parent", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="formsets", to="core.flexform" + on_delete=django.db.models.deletion.CASCADE, + related_name="formsets", + to="core.flexform", ), ), ], @@ -90,9 +152,20 @@ class Migration(migrations.Migration): 
migrations.CreateModel( name="FlexFormField", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("label", models.CharField(max_length=30)), - ("name", django.contrib.postgres.fields.citext.CICharField(blank=True, max_length=30)), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(blank=True, max_length=30), + ), ("field_type", strategy_field.fields.StrategyClassField()), ("choices", models.CharField(blank=True, max_length=2000, null=True)), ("required", models.BooleanField(default=False)), @@ -101,7 +174,9 @@ class Migration(migrations.Migration): ( "flex_form", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="fields", to="core.flexform" + on_delete=django.db.models.deletion.CASCADE, + related_name="fields", + to="core.flexform", ), ), ( diff --git a/src/aurora/core/migrations/0003_customfieldtype_base_type.py b/src/aurora/core/migrations/0003_customfieldtype_base_type.py index 95b75774..ac334bb5 100644 --- a/src/aurora/core/migrations/0003_customfieldtype_base_type.py +++ b/src/aurora/core/migrations/0003_customfieldtype_base_type.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0002_initial"), ] diff --git a/src/aurora/core/migrations/0004_auto_20220308_1056.py b/src/aurora/core/migrations/0004_auto_20220308_1056.py index 20d80f67..6ca6a977 100644 --- a/src/aurora/core/migrations/0004_auto_20220308_1056.py +++ b/src/aurora/core/migrations/0004_auto_20220308_1056.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0003_customfieldtype_base_type"), ] diff --git a/src/aurora/core/migrations/0005_remove_customfieldtype_clean.py b/src/aurora/core/migrations/0005_remove_customfieldtype_clean.py index d3d3a6d7..95751c9f 100644 --- a/src/aurora/core/migrations/0005_remove_customfieldtype_clean.py +++ b/src/aurora/core/migrations/0005_remove_customfieldtype_clean.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0004_auto_20220308_1056"), ] diff --git a/src/aurora/core/migrations/0006_flexform_base_type.py b/src/aurora/core/migrations/0006_flexform_base_type.py index ec0ace0a..2ad8c171 100644 --- a/src/aurora/core/migrations/0006_flexform_base_type.py +++ b/src/aurora/core/migrations/0006_flexform_base_type.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0005_remove_customfieldtype_clean"), ] diff --git a/src/aurora/core/migrations/0007_auto_20220308_2049.py b/src/aurora/core/migrations/0007_auto_20220308_2049.py index d5e62fde..2e9277be 100644 --- a/src/aurora/core/migrations/0007_auto_20220308_2049.py +++ b/src/aurora/core/migrations/0007_auto_20220308_2049.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0006_flexform_base_type"), ] @@ -27,7 +26,9 @@ class Migration(migrations.Migration): model_name="customfieldtype", name="name", field=django.contrib.postgres.fields.citext.CICharField( - max_length=100, unique=True, validators=[django.core.validators.RegexValidator("[A-Z][a-zA-Z0-9_]*")] + max_length=100, + unique=True, + validators=[django.core.validators.RegexValidator("[A-Z][a-zA-Z0-9_]*")], ), ), migrations.AlterField( diff --git a/src/aurora/core/migrations/0008_auto_20220310_0517.py b/src/aurora/core/migrations/0008_auto_20220310_0517.py index 52f50f19..fc6806f1 100644 --- 
a/src/aurora/core/migrations/0008_auto_20220310_0517.py +++ b/src/aurora/core/migrations/0008_auto_20220310_0517.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0007_auto_20220308_2049"), ] diff --git a/src/aurora/core/migrations/0009_auto_20220310_1033.py b/src/aurora/core/migrations/0009_auto_20220310_1033.py index 366c56d3..d4e18dab 100644 --- a/src/aurora/core/migrations/0009_auto_20220310_1033.py +++ b/src/aurora/core/migrations/0009_auto_20220310_1033.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0008_auto_20220310_0517"), ] @@ -12,7 +11,11 @@ class Migration(migrations.Migration): operations = [ migrations.AlterModelOptions( name="formset", - options={"ordering": ["ordering"], "verbose_name": "FormSet", "verbose_name_plural": "FormSets"}, + options={ + "ordering": ["ordering"], + "verbose_name": "FormSet", + "verbose_name_plural": "FormSets", + }, ), migrations.AddField( model_name="formset", diff --git a/src/aurora/core/migrations/0010_auto_20220310_1520.py b/src/aurora/core/migrations/0010_auto_20220310_1520.py index b6b364bd..da5b542c 100644 --- a/src/aurora/core/migrations/0010_auto_20220310_1520.py +++ b/src/aurora/core/migrations/0010_auto_20220310_1520.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0009_auto_20220310_1033"), ] diff --git a/src/aurora/core/migrations/0011_flexform_version.py b/src/aurora/core/migrations/0011_flexform_version.py index d4f10c4f..a8c74624 100644 --- a/src/aurora/core/migrations/0011_flexform_version.py +++ b/src/aurora/core/migrations/0011_flexform_version.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0010_auto_20220310_1520"), ] diff --git a/src/aurora/core/migrations/0012_auto_20220311_0514.py b/src/aurora/core/migrations/0012_auto_20220311_0514.py index fbf3d8e0..c88ecbe9 100644 --- a/src/aurora/core/migrations/0012_auto_20220311_0514.py +++ b/src/aurora/core/migrations/0012_auto_20220311_0514.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0011_flexform_version"), ] diff --git a/src/aurora/core/migrations/0013_optionset_columns.py b/src/aurora/core/migrations/0013_optionset_columns.py index eea553fc..2307db16 100644 --- a/src/aurora/core/migrations/0013_optionset_columns.py +++ b/src/aurora/core/migrations/0013_optionset_columns.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0012_auto_20220311_0514"), ] diff --git a/src/aurora/core/migrations/0014_optionset_version.py b/src/aurora/core/migrations/0014_optionset_version.py index cce33be7..2ba8137f 100644 --- a/src/aurora/core/migrations/0014_optionset_version.py +++ b/src/aurora/core/migrations/0014_optionset_version.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0013_optionset_columns"), ] diff --git a/src/aurora/core/migrations/0015_auto_20220313_1209.py b/src/aurora/core/migrations/0015_auto_20220313_1209.py index 0508b480..07d777f1 100644 --- a/src/aurora/core/migrations/0015_auto_20220313_1209.py +++ b/src/aurora/core/migrations/0015_auto_20220313_1209.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0014_optionset_version"), ] diff --git a/src/aurora/core/migrations/0016_alter_optionset_version.py b/src/aurora/core/migrations/0016_alter_optionset_version.py index 6978a8bb..5e6fed76 100644 --- a/src/aurora/core/migrations/0016_alter_optionset_version.py +++ 
b/src/aurora/core/migrations/0016_alter_optionset_version.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0015_auto_20220313_1209"), ] diff --git a/src/aurora/core/migrations/0017_formset_max_num.py b/src/aurora/core/migrations/0017_formset_max_num.py index e5194d92..271aa787 100644 --- a/src/aurora/core/migrations/0017_formset_max_num.py +++ b/src/aurora/core/migrations/0017_formset_max_num.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0016_alter_optionset_version"), ] diff --git a/src/aurora/core/migrations/0018_rename_required_formset_min_num.py b/src/aurora/core/migrations/0018_rename_required_formset_min_num.py index a52b7b2f..f70a62be 100644 --- a/src/aurora/core/migrations/0018_rename_required_formset_min_num.py +++ b/src/aurora/core/migrations/0018_rename_required_formset_min_num.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0017_formset_max_num"), ] diff --git a/src/aurora/core/migrations/0019_alter_formset_unique_together.py b/src/aurora/core/migrations/0019_alter_formset_unique_together.py index c954cd27..b4ac0463 100644 --- a/src/aurora/core/migrations/0019_alter_formset_unique_together.py +++ b/src/aurora/core/migrations/0019_alter_formset_unique_together.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0018_rename_required_formset_min_num"), ] diff --git a/src/aurora/core/migrations/0020_auto_20220314_0657.py b/src/aurora/core/migrations/0020_auto_20220314_0657.py index 59d92a67..860e2a31 100644 --- a/src/aurora/core/migrations/0020_auto_20220314_0657.py +++ b/src/aurora/core/migrations/0020_auto_20220314_0657.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0019_alter_formset_unique_together"), ] diff --git a/src/aurora/core/migrations/0021_auto_20220315_1639.py b/src/aurora/core/migrations/0021_auto_20220315_1639.py index 44bd84d0..79faf50b 100644 --- a/src/aurora/core/migrations/0021_auto_20220315_1639.py +++ b/src/aurora/core/migrations/0021_auto_20220315_1639.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0020_auto_20220314_0657"), ] diff --git a/src/aurora/core/migrations/0022_auto_20220318_0516.py b/src/aurora/core/migrations/0022_auto_20220318_0516.py index e01d9bf4..e031a502 100644 --- a/src/aurora/core/migrations/0022_auto_20220318_0516.py +++ b/src/aurora/core/migrations/0022_auto_20220318_0516.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0021_auto_20220315_1639"), ] diff --git a/src/aurora/core/migrations/0023_alter_formset_description.py b/src/aurora/core/migrations/0023_alter_formset_description.py index 6922fbc3..9a6cf020 100644 --- a/src/aurora/core/migrations/0023_alter_formset_description.py +++ b/src/aurora/core/migrations/0023_alter_formset_description.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0022_auto_20220318_0516"), ] diff --git a/src/aurora/core/migrations/0024_formset_enabled.py b/src/aurora/core/migrations/0024_formset_enabled.py index 008ce102..baf4e386 100644 --- a/src/aurora/core/migrations/0024_formset_enabled.py +++ b/src/aurora/core/migrations/0024_formset_enabled.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0023_alter_formset_description"), ] diff --git a/src/aurora/core/migrations/0025_flexformfield_enabled.py b/src/aurora/core/migrations/0025_flexformfield_enabled.py index 
514879fd..c73cff22 100644 --- a/src/aurora/core/migrations/0025_flexformfield_enabled.py +++ b/src/aurora/core/migrations/0025_flexformfield_enabled.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0024_formset_enabled"), ] diff --git a/src/aurora/core/migrations/0026_formset_advanced.py b/src/aurora/core/migrations/0026_formset_advanced.py index 32d99dec..18112f92 100644 --- a/src/aurora/core/migrations/0026_formset_advanced.py +++ b/src/aurora/core/migrations/0026_formset_advanced.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0025_flexformfield_enabled"), ] diff --git a/src/aurora/core/migrations/0027_alter_formset_advanced.py b/src/aurora/core/migrations/0027_alter_formset_advanced.py index ac599200..195bdd58 100644 --- a/src/aurora/core/migrations/0027_alter_formset_advanced.py +++ b/src/aurora/core/migrations/0027_alter_formset_advanced.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0026_formset_advanced"), ] diff --git a/src/aurora/core/migrations/0028_auto_20220321_1743.py b/src/aurora/core/migrations/0028_auto_20220321_1743.py index 51beb205..930bbf4a 100644 --- a/src/aurora/core/migrations/0028_auto_20220321_1743.py +++ b/src/aurora/core/migrations/0028_auto_20220321_1743.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0027_alter_formset_advanced"), ] @@ -26,7 +25,8 @@ class Migration(migrations.Migration): model_name="validator", name="target", field=models.CharField( - choices=[("form", "Form"), ("field", "Field"), ("formset", "Formset")], max_length=10 + choices=[("form", "Form"), ("field", "Field"), ("formset", "Formset")], + max_length=10, ), ), ] diff --git a/src/aurora/core/migrations/0029_alter_validator_target.py b/src/aurora/core/migrations/0029_alter_validator_target.py index 71986125..7c3d9faa 100644 --- a/src/aurora/core/migrations/0029_alter_validator_target.py +++ b/src/aurora/core/migrations/0029_alter_validator_target.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0028_auto_20220321_1743"), ] @@ -14,7 +13,12 @@ class Migration(migrations.Migration): model_name="validator", name="target", field=models.CharField( - choices=[("form", "Form"), ("field", "Field"), ("formset", "Formset"), ("module", "Module")], + choices=[ + ("form", "Form"), + ("field", "Field"), + ("formset", "Formset"), + ("module", "Module"), + ], max_length=10, ), ), diff --git a/src/aurora/core/migrations/0030_alter_flexformfield_name.py b/src/aurora/core/migrations/0030_alter_flexformfield_name.py index d88309ed..4292df77 100644 --- a/src/aurora/core/migrations/0030_alter_flexformfield_name.py +++ b/src/aurora/core/migrations/0030_alter_flexformfield_name.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0029_alter_validator_target"), ] diff --git a/src/aurora/core/migrations/0031_validator_trace.py b/src/aurora/core/migrations/0031_validator_trace.py index a4c826e1..f2ed44af 100644 --- a/src/aurora/core/migrations/0031_validator_trace.py +++ b/src/aurora/core/migrations/0031_validator_trace.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0030_alter_flexformfield_name"), ] diff --git a/src/aurora/core/migrations/0032_auto_20220328_0647.py b/src/aurora/core/migrations/0032_auto_20220328_0647.py index fe049b02..019af9b1 100644 --- a/src/aurora/core/migrations/0032_auto_20220328_0647.py +++ 
b/src/aurora/core/migrations/0032_auto_20220328_0647.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0031_validator_trace"), ] @@ -31,7 +30,9 @@ class Migration(migrations.Migration): model_name="optionset", name="name", field=django.contrib.postgres.fields.citext.CICharField( - max_length=100, unique=True, validators=[django.core.validators.RegexValidator("[a-z0-9-_]")] + max_length=100, + unique=True, + validators=[django.core.validators.RegexValidator("[a-z0-9-_]")], ), ), ] diff --git a/src/aurora/core/migrations/0033_auto_20220402_0954.py b/src/aurora/core/migrations/0033_auto_20220402_0954.py index e624655b..ecb878b7 100644 --- a/src/aurora/core/migrations/0033_auto_20220402_0954.py +++ b/src/aurora/core/migrations/0033_auto_20220402_0954.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0032_auto_20220328_0647"), ] diff --git a/src/aurora/core/migrations/0034_auto_20220402_1124.py b/src/aurora/core/migrations/0034_auto_20220402_1124.py index 75c6e175..625a925c 100644 --- a/src/aurora/core/migrations/0034_auto_20220402_1124.py +++ b/src/aurora/core/migrations/0034_auto_20220402_1124.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0033_auto_20220402_0954"), ] diff --git a/src/aurora/core/migrations/0036_optionset_last_update_date.py b/src/aurora/core/migrations/0036_optionset_last_update_date.py index 9822cd9c..29e27425 100644 --- a/src/aurora/core/migrations/0036_optionset_last_update_date.py +++ b/src/aurora/core/migrations/0036_optionset_last_update_date.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0035_auto_20220402_1124"), ] diff --git a/src/aurora/core/migrations/0037_auto_20220402_1152.py b/src/aurora/core/migrations/0037_auto_20220402_1152.py index d0b34a8d..b65b7198 100644 --- a/src/aurora/core/migrations/0037_auto_20220402_1152.py +++ b/src/aurora/core/migrations/0037_auto_20220402_1152.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0036_optionset_last_update_date"), ] diff --git a/src/aurora/core/migrations/0038_auto_20220417_0554.py b/src/aurora/core/migrations/0038_auto_20220417_0554.py index d75db422..8b63d524 100644 --- a/src/aurora/core/migrations/0038_auto_20220417_0554.py +++ b/src/aurora/core/migrations/0038_auto_20220417_0554.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0037_auto_20220402_1152"), ] diff --git a/src/aurora/core/migrations/0039_auto_20220417_0919.py b/src/aurora/core/migrations/0039_auto_20220417_0919.py index ea41fead..efecdb4f 100644 --- a/src/aurora/core/migrations/0039_auto_20220417_0919.py +++ b/src/aurora/core/migrations/0039_auto_20220417_0919.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0038_auto_20220417_0554"), ] @@ -27,13 +26,17 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="validator", name="message", - field=models.CharField(help_text="Default error message if validator return 'false'.", max_length=255), + field=models.CharField( + help_text="Default error message if validator return 'false'.", + max_length=255, + ), ), migrations.AlterField( model_name="validator", name="trace", field=models.BooleanField( - default=False, help_text="Debug/Testing purposes: trace validator invocation on Sentry." 
+ default=False, + help_text="Debug/Testing purposes: trace validator invocation on Sentry.", ), ), ] diff --git a/src/aurora/core/migrations/0040_auto_20220420_1248.py b/src/aurora/core/migrations/0040_auto_20220420_1248.py index 142cde8b..7fd2b063 100644 --- a/src/aurora/core/migrations/0040_auto_20220420_1248.py +++ b/src/aurora/core/migrations/0040_auto_20220420_1248.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0039_auto_20220417_0919"), ] @@ -14,7 +13,11 @@ class Migration(migrations.Migration): model_name="optionset", name="languages", field=models.CharField( - blank=True, default="-;-;", help_text="language code of each column.", max_length=255, null=True + blank=True, + default="-;-;", + help_text="language code of each column.", + max_length=255, + null=True, ), ), migrations.AddField( diff --git a/src/aurora/core/migrations/0041_auto_20220505_1751.py b/src/aurora/core/migrations/0041_auto_20220505_1751.py index dc930eb3..a6ee993a 100644 --- a/src/aurora/core/migrations/0041_auto_20220505_1751.py +++ b/src/aurora/core/migrations/0041_auto_20220505_1751.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0040_auto_20220420_1248"), ] @@ -14,14 +13,22 @@ class Migration(migrations.Migration): model_name="validator", name="message", field=models.CharField( - blank=True, help_text="Default error message if validator return 'false'.", max_length=255, null=True + blank=True, + help_text="Default error message if validator return 'false'.", + max_length=255, + null=True, ), ), migrations.AlterField( model_name="validator", name="target", field=models.CharField( - choices=[("form", "Form"), ("field", "Field"), ("formset", "Formset"), ("module", "Module")], + choices=[ + ("form", "Form"), + ("field", "Field"), + ("formset", "Formset"), + ("module", "Module"), + ], max_length=10, ), ), diff --git a/src/aurora/core/migrations/0042_auto_20220506_0601.py b/src/aurora/core/migrations/0042_auto_20220506_0601.py index 381f09ce..2dd04563 100644 --- a/src/aurora/core/migrations/0042_auto_20220506_0601.py +++ b/src/aurora/core/migrations/0042_auto_20220506_0601.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0041_auto_20220505_1751"), ] @@ -16,6 +15,10 @@ class Migration(migrations.Migration): ), migrations.AlterModelOptions( name="flexformfield", - options={"ordering": ["ordering"], "verbose_name": "Flex Field", "verbose_name_plural": "Flex Fields"}, + options={ + "ordering": ["ordering"], + "verbose_name": "Flex Field", + "verbose_name_plural": "Flex Fields", + }, ), ] diff --git a/src/aurora/core/migrations/0043_rename_name_validator_label.py b/src/aurora/core/migrations/0043_rename_name_validator_label.py index f2caa61c..3bd9e175 100644 --- a/src/aurora/core/migrations/0043_rename_name_validator_label.py +++ b/src/aurora/core/migrations/0043_rename_name_validator_label.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0042_auto_20220506_0601"), ] diff --git a/src/aurora/core/migrations/0044_auto_20220511_0452.py b/src/aurora/core/migrations/0044_auto_20220511_0452.py index 6648551f..595273f8 100644 --- a/src/aurora/core/migrations/0044_auto_20220511_0452.py +++ b/src/aurora/core/migrations/0044_auto_20220511_0452.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0043_rename_name_validator_label"), ] @@ -15,7 +14,11 @@ class Migration(migrations.Migration): model_name="validator", name="name", 
field=django.contrib.postgres.fields.citext.CICharField( - blank=True, null=True, max_length=255, unique=True, verbose_name="Function Name" + blank=True, + null=True, + max_length=255, + unique=True, + verbose_name="Function Name", ), ), migrations.AlterField( diff --git a/src/aurora/core/migrations/0045_remove_validator_message.py b/src/aurora/core/migrations/0045_remove_validator_message.py index 6afe1228..0cb41dae 100644 --- a/src/aurora/core/migrations/0045_remove_validator_message.py +++ b/src/aurora/core/migrations/0045_remove_validator_message.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0044_auto_20220511_0452"), ] diff --git a/src/aurora/core/migrations/0046_alter_validator_target.py b/src/aurora/core/migrations/0046_alter_validator_target.py index 63fe836f..1d4c9eb8 100644 --- a/src/aurora/core/migrations/0046_alter_validator_target.py +++ b/src/aurora/core/migrations/0046_alter_validator_target.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0045_remove_validator_message"), ] diff --git a/src/aurora/core/migrations/0047_validator_count_errors.py b/src/aurora/core/migrations/0047_validator_count_errors.py index 3fe15ac4..3ca12594 100644 --- a/src/aurora/core/migrations/0047_validator_count_errors.py +++ b/src/aurora/core/migrations/0047_validator_count_errors.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0046_alter_validator_target"), ] diff --git a/src/aurora/core/migrations/0048_auto_20230126_1226.py b/src/aurora/core/migrations/0048_auto_20230126_1226.py index baf0af82..99a37497 100644 --- a/src/aurora/core/migrations/0048_auto_20230126_1226.py +++ b/src/aurora/core/migrations/0048_auto_20230126_1226.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0047_validator_count_errors"), ] @@ -17,9 +16,23 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Organization", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=100, unique=True)), - ("slug", models.SlugField(blank=True, max_length=100, null=True, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=100, unique=True), + ), + ( + "slug", + models.SlugField(blank=True, max_length=100, null=True, unique=True), + ), ("lft", models.PositiveIntegerField(editable=False)), ("rght", models.PositiveIntegerField(editable=False)), ("tree_id", models.PositiveIntegerField(db_index=True, editable=False)), @@ -45,8 +58,19 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Project", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=100, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + django.contrib.postgres.fields.citext.CICharField(max_length=100, unique=True), + ), ("slug", models.SlugField(blank=True, max_length=100, null=True)), ("lft", models.PositiveIntegerField(editable=False)), ("rght", models.PositiveIntegerField(editable=False)), @@ -82,6 +106,10 @@ class Migration(migrations.Migration): migrations.AddField( 
model_name="flexform", name="project", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="core.project"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="core.project", + ), ), ] diff --git a/src/aurora/core/migrations/0049_auto_20230214_1915.py b/src/aurora/core/migrations/0049_auto_20230214_1915.py index bad991bf..3d316b03 100644 --- a/src/aurora/core/migrations/0049_auto_20230214_1915.py +++ b/src/aurora/core/migrations/0049_auto_20230214_1915.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0048_auto_20230126_1226"), ] diff --git a/src/aurora/core/migrations/0050_auto_20230307_1139.py b/src/aurora/core/migrations/0050_auto_20230307_1139.py index 473056c4..3da45783 100644 --- a/src/aurora/core/migrations/0050_auto_20230307_1139.py +++ b/src/aurora/core/migrations/0050_auto_20230307_1139.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0049_auto_20230214_1915"), ] @@ -19,14 +18,18 @@ class Migration(migrations.Migration): model_name="flexformfield", name="name", field=django.contrib.postgres.fields.citext.CICharField( - blank=True, max_length=100, validators=[django.core.validators.RegexValidator("^[a-z_0-9]*$")] + blank=True, + max_length=100, + validators=[django.core.validators.RegexValidator("^[a-z_0-9]*$")], ), ), migrations.AlterField( model_name="flexformfield", name="regex", field=aurora.core.compat.RegexField( - blank=True, null=True, validators=[aurora.core.models.RegexPatternValidator()] + blank=True, + null=True, + validators=[aurora.core.models.RegexPatternValidator()], ), ), ] diff --git a/src/aurora/core/migrations/0051_flexformfield_validation.py b/src/aurora/core/migrations/0051_flexformfield_validation.py index 6f95ea43..84bffddd 100644 --- a/src/aurora/core/migrations/0051_flexformfield_validation.py +++ b/src/aurora/core/migrations/0051_flexformfield_validation.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0050_auto_20230307_1139"), ] diff --git a/src/aurora/core/migrations/0052_flexform_advanced.py b/src/aurora/core/migrations/0052_flexform_advanced.py index 3b18a5e1..2ebf0387 100644 --- a/src/aurora/core/migrations/0052_flexform_advanced.py +++ b/src/aurora/core/migrations/0052_flexform_advanced.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0051_flexformfield_validation"), ] diff --git a/src/aurora/core/migrations/0053_auto_20230321_0604.py b/src/aurora/core/migrations/0053_auto_20230321_0604.py index 4429f979..268e6e7d 100644 --- a/src/aurora/core/migrations/0053_auto_20230321_0604.py +++ b/src/aurora/core/migrations/0053_auto_20230321_0604.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0052_flexform_advanced"), ] diff --git a/src/aurora/core/migrations/0054_auto_20231123_0605.py b/src/aurora/core/migrations/0054_auto_20231123_0605.py new file mode 100644 index 00000000..a7765ac5 --- /dev/null +++ b/src/aurora/core/migrations/0054_auto_20231123_0605.py @@ -0,0 +1,27 @@ +# Generated by Django 3.2.18 on 2023-11-23 06:05 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0053_auto_20230321_0604"), + ] + + operations = [ + migrations.AlterField( + model_name="flexform", + name="project", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.project"), + ), + 
migrations.AlterField( + model_name="project", + name="organization", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="projects", + to="core.organization", + ), + ), + ] diff --git a/src/aurora/core/migrations/0055_alter_project_slug.py b/src/aurora/core/migrations/0055_alter_project_slug.py new file mode 100644 index 00000000..3fb1d67b --- /dev/null +++ b/src/aurora/core/migrations/0055_alter_project_slug.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.18 on 2023-11-23 06:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0054_auto_20231123_0605"), + ] + + operations = [ + migrations.AlterField( + model_name="project", + name="slug", + field=models.SlugField(blank=True, max_length=100), + ), + ] diff --git a/src/aurora/core/migrations/0056_case_insensitive_collation.py b/src/aurora/core/migrations/0056_case_insensitive_collation.py new file mode 100644 index 00000000..25ff454e --- /dev/null +++ b/src/aurora/core/migrations/0056_case_insensitive_collation.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.10 on 2024-05-03 06:31 +from django.contrib.postgres.operations import CreateCollation +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0055_alter_project_slug"), + ] + + operations = [ + CreateCollation( + "_", + provider="icu", + locale="und-u-ks-level2", + deterministic=False, + ), + ] diff --git a/src/aurora/core/migrations/0057_alter_customfieldtype_name_alter_flexform_name_and_more.py b/src/aurora/core/migrations/0057_alter_customfieldtype_name_alter_flexform_name_and_more.py new file mode 100644 index 00000000..a9b673a6 --- /dev/null +++ b/src/aurora/core/migrations/0057_alter_customfieldtype_name_alter_flexform_name_and_more.py @@ -0,0 +1,80 @@ +# Generated by Django 4.2.10 on 2024-05-03 06:32 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0056_case_insensitive_collation"), + ] + + operations = [ + migrations.AlterField( + model_name="customfieldtype", + name="name", + field=models.CharField( + db_collation="_", + max_length=100, + unique=True, + validators=[django.core.validators.RegexValidator("[A-Z][a-zA-Z0-9_]*")], + ), + ), + migrations.AlterField( + model_name="flexform", + name="name", + field=models.CharField(db_collation="_", max_length=255, unique=True), + ), + migrations.AlterField( + model_name="flexformfield", + name="name", + field=models.CharField( + blank=True, + db_collation="_", + max_length=100, + validators=[django.core.validators.RegexValidator("^[a-z_0-9]*$")], + ), + ), + migrations.AlterField( + model_name="formset", + name="name", + field=models.CharField(max_length=255), + ), + migrations.AlterField( + model_name="optionset", + name="name", + field=models.CharField( + db_collation="_", + max_length=100, + unique=True, + validators=[django.core.validators.RegexValidator("[a-z0-9-_]")], + ), + ), + migrations.AlterField( + model_name="organization", + name="name", + field=models.CharField(db_collation="_", max_length=100, unique=True), + ), + migrations.AlterField( + model_name="project", + name="name", + field=models.CharField(db_collation="_", max_length=100, unique=True), + ), + migrations.AlterField( + model_name="validator", + name="label", + field=models.CharField(db_collation="_", max_length=255), + ), + migrations.AlterField( + model_name="validator", + name="name", + 
field=models.CharField( + blank=True, + db_collation="_", + max_length=255, + null=True, + unique=True, + verbose_name="Function Name", + ), + ), + ] diff --git a/src/aurora/core/migrations/0058_alter_flexformfield_ordering_alter_formset_ordering.py b/src/aurora/core/migrations/0058_alter_flexformfield_ordering_alter_formset_ordering.py new file mode 100644 index 00000000..90f9ffa2 --- /dev/null +++ b/src/aurora/core/migrations/0058_alter_flexformfield_ordering_alter_formset_ordering.py @@ -0,0 +1,22 @@ +# Generated by Django 5.1.5 on 2025-01-22 00:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0057_alter_customfieldtype_name_alter_flexform_name_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="flexformfield", + name="ordering", + field=models.PositiveIntegerField(db_index=True, default=0, verbose_name="ordering"), + ), + migrations.AlterField( + model_name="formset", + name="ordering", + field=models.PositiveIntegerField(db_index=True, default=0, verbose_name="ordering"), + ), + ] diff --git a/src/aurora/core/models.py b/src/aurora/core/models.py index 96c17497..e04f16df 100644 --- a/src/aurora/core/models.py +++ b/src/aurora/core/models.py @@ -4,15 +4,10 @@ from datetime import date, datetime, time from inspect import isclass from json import JSONDecodeError - -from django.contrib.admin.templatetags.admin_urls import admin_urlname from pathlib import Path -import jsonpickle -from admin_ordering.models import OrderableModel -from concurrency.fields import AutoIncVersionField from django import forms -from django.contrib.postgres.fields import CICharField +from django.contrib.admin.templatetags.admin_urls import admin_urlname from django.core.cache import caches from django.core.exceptions import ValidationError from django.core.validators import RegexValidator @@ -22,17 +17,20 @@ from django.urls import reverse from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property -from django.utils.safestring import mark_safe from django.utils.translation import get_language + +from admin_ordering.models import OrderableModel +from concurrency.fields import AutoIncVersionField from mptt.fields import TreeForeignKey from mptt.managers import TreeManager from mptt.models import MPTTModel from natural_keys import NaturalKeyModel, NaturalKeyModelManager +from py_mini_racer._types import JSUndefined from py_mini_racer.py_mini_racer import MiniRacerBaseException from sentry_sdk import set_tag from strategy_field.utils import fqn -from ..i18n.gettext import gettext as _ +from ..i18n.get_text import gettext as _ from ..i18n.models import I18NModel from ..state import state from . 
import fields @@ -55,16 +53,18 @@ def get_admin_changelist_url(self): return reverse(admin_urlname(self._meta, "changelist")) -class OrganizationManager(TreeManager): +class OrganizationManager(NaturalKeyModelManager, TreeManager): def get_by_natural_key(self, slug): return self.get(slug=slug) -class Organization(AdminReverseMixin, MPTTModel): +class Organization(AdminReverseMixin, NaturalKeyModel, MPTTModel): + _natural_key = ("slug",) + version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - name = CICharField(max_length=100, unique=True) + name = models.CharField(max_length=100, unique=True, db_collation="_") slug = models.SlugField(max_length=100, unique=True, blank=True, null=True) parent = TreeForeignKey("self", on_delete=models.CASCADE, null=True, blank=True, related_name="children") @@ -76,27 +76,26 @@ class MPTTMeta: def __str__(self): return self.name - def natural_key(self): - return (self.slug,) - def save(self, *args, **kwargs): if self._state.adding and not self.slug: self.slug = slugify(self.name) super().save(*args, **kwargs) -class ProjectManager(TreeManager): +class ProjectManager(NaturalKeyModelManager, TreeManager): def get_by_natural_key(self, slug, org_slug): return self.get(slug=slug, organization__slug=org_slug) -class Project(AdminReverseMixin, MPTTModel): +class Project(AdminReverseMixin, NaturalKeyModel, MPTTModel): + _natural_key = ("slug", "organization") + version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - name = CICharField(max_length=100, unique=True) - slug = models.SlugField(max_length=100, blank=True, null=True) - organization = models.ForeignKey(Organization, null=True, related_name="projects", on_delete=models.CASCADE) + name = models.CharField(max_length=100, unique=True, db_collation="_") + slug = models.SlugField(max_length=100, blank=True) + organization = models.ForeignKey(Organization, related_name="projects", on_delete=models.CASCADE) parent = TreeForeignKey("self", on_delete=models.CASCADE, null=True, blank=True, related_name="children") objects = ProjectManager() @@ -110,9 +109,6 @@ class Meta: def __str__(self): return self.name - def natural_key(self): - return self.slug, self.organization.slug - def save(self, *args, **kwargs): if not self.slug: self.slug = slugify(self.name) @@ -134,15 +130,11 @@ class Validator(AdminReverseMixin, NaturalKeyModel): SCRIPT = "script" HANDLER = "handler" - CONSOLE = mark_safe( - """ + CONSOLE = """ console = {log: function(d) {}}; """ - ) LIB = (Path(__file__).parent / "static" / "smart_validation.min.js").read_text() - # LIB += (Path(__file__).parent / 'static' / 'validate_utils.min.js').read_text() - LIB3 = mark_safe( - """ + LIB3 = """ TODAY = new Date(); dateutil = {today: TODAY}; @@ -158,13 +150,19 @@ class Validator(AdminReverseMixin, NaturalKeyModel): _.is_adult = function(d) { return !_.is_child(d)}; """ - ) version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - label = CICharField(max_length=255) - name = CICharField(verbose_name=_("Function Name"), max_length=255, unique=True, blank=True, null=True) + label = models.CharField(max_length=255, db_collation="_") + name = models.CharField( + verbose_name=_("Function Name"), + max_length=255, + unique=True, + blank=True, + null=True, + db_collation="_", + ) code = models.TextField(blank=True, null=True) target = models.CharField( max_length=10, @@ -184,7 +182,9 @@ class Validator(AdminReverseMixin, NaturalKeyModel): count_errors = 
models.BooleanField(default=False, help_text="Count failures") active = models.BooleanField(default=False, blank=True, help_text="Enable/Disable validator.") draft = models.BooleanField( - default=False, blank=True, help_text="Testing purposes: draft validator are enabled only for staff users." + default=False, + blank=True, + help_text="Testing purposes: draft validator are enabled only for staff users.", ) _natural_key = ["name"] @@ -193,9 +193,9 @@ def __str__(self): @staticmethod def js_type(value): - if isinstance(value, (datetime, date, time)): + if isinstance(value, datetime | date | time): return str(value) - if isinstance(value, (dict,)): + if isinstance(value, dict): return jsonfy(value) return value @@ -222,10 +222,8 @@ def validate(self, value, registration=None): set_tag("validator", self.name) - if self.active: - self.monitor(self.STATUS_UNKNOWN, value) - else: - self.monitor(self.STATUS_INACTIVE, value) + status = self.STATUS_UNKNOWN if self.active else self.STATUS_INACTIVE + self.monitor(status, value) if self.active or (self.draft and state.request.user.is_staff): ctx = MiniRacer() @@ -241,19 +239,20 @@ def validate(self, value, registration=None): ret = False else: try: - ret = jsonpickle.decode(result) + ret = json.loads(result) except (JSONDecodeError, TypeError): ret = result if isinstance(ret, str): raise ValidationError(_(ret)) - elif isinstance(ret, (list, tuple)): + if isinstance(ret, list | tuple): errors = [_(v) for v in ret] raise ValidationError(errors) - elif isinstance(ret, dict): + if isinstance(ret, dict): errors = {k: _(v) for (k, v) in ret.items()} raise ValidationError(errors) - elif isinstance(ret, bool) and not ret: + if isinstance(ret, bool) and not ret or ret is JSUndefined: raise ValidationError(_("Please insert a valid value")) + except ValidationError as e: import sentry_sdk @@ -281,6 +280,7 @@ def validate(self, value, registration=None): elif self.trace: self.monitor(self.STATUS_SKIP, value) + return None def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.name: @@ -304,11 +304,15 @@ def inner(value): class FlexForm(AdminReverseMixin, I18NModel, NaturalKeyModel): version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - project = models.ForeignKey(Project, null=True, on_delete=models.CASCADE) - name = CICharField(max_length=255, unique=True) + project = models.ForeignKey(Project, on_delete=models.CASCADE) + name = models.CharField(max_length=255, unique=True, db_collation="_") base_type = StrategyClassField(registry=form_registry, default=FlexFormBaseForm) validator = models.ForeignKey( - Validator, limit_choices_to={"target": Validator.FORM}, blank=True, null=True, on_delete=models.PROTECT + Validator, + limit_choices_to={"target": Validator.FORM}, + blank=True, + null=True, + on_delete=models.PROTECT, ) advanced = models.JSONField(default=dict, blank=True) @@ -323,33 +327,6 @@ def __init__(self, *args, **kwargs): self._initial = {} super().__init__(*args, **kwargs) - # def add_field( - # self, - # label, - # field_type=forms.CharField, - # required=False, - # choices=None, - # regex=None, - # validator=None, - # name=None, - # **kwargs, - # ): - # if isinstance(choices, (list, tuple)): - # kwargs["choices"] = choices - # choices = None - # return self.fields.update_or_create( - # label=label, - # defaults={ - # "name": name, - # "field_type": field_type, - # "choices": choices, - # "regex": regex, - # "validator": validator, - # "advanced": kwargs, - # "required": 
required, - # }, - # )[0] - def get_initial(self): return self._initial @@ -382,8 +359,7 @@ def get_form_class(self): "indexes": indexes, **fields, } - flexForm = type(f"{self.name}FlexForm", (self.base_type,), form_class_attrs) - return flexForm + return type(f"{self.name}FlexForm", (self.base_type,), form_class_attrs) def get_formsets_classes(self): formsets = {} @@ -403,33 +379,39 @@ def save(self, force_insert=False, force_update=False, using=None, update_fields def get_usage(self): ret = [] - for reg in self.registration_set.all(): - ret.append( + ret.extend( + [ { "type": "Registration", "obj": reg, "editor_url": reverse("admin:registration_registration_change", args=[reg.pk]), "change_url": reverse("admin:registration_registration_change", args=[reg.pk]), } - ) - for fs in self.formsets.all(): - ret.append( + for reg in self.registration_set.all() + ] + ) + ret.extend( + [ { "type": "Parend Of", "obj": fs.flex_form, "editor_url": reverse("admin:core_flexform_form_editor", args=[fs.flex_form.pk]), "change_url": reverse("admin:core_flexform_change", args=[fs.flex_form.pk]), } - ) - for fs in self.formset_set.all(): - ret.append( + for fs in self.formsets.all() + ] + ) + ret.extend( + [ { "type": "Child Of", "obj": fs.parent, "editor_url": reverse("admin:core_flexform_form_editor", args=[fs.parent.pk]), "change_url": reverse("admin:core_flexform_change", args=[fs.parent.pk]), } - ) + for fs in self.formset_set.all() + ] + ) return ret @@ -460,7 +442,7 @@ class FormSet(AdminReverseMixin, NaturalKeyModel, OrderableModel): version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - name = CICharField(max_length=255) + name = models.CharField(max_length=255) title = models.CharField(max_length=300, blank=True, null=True) description = models.TextField(max_length=2000, blank=True, null=True) enabled = models.BooleanField(default=True) @@ -473,11 +455,17 @@ class FormSet(AdminReverseMixin, NaturalKeyModel, OrderableModel): dynamic = models.BooleanField(default=True) validator = models.ForeignKey( - Validator, blank=True, null=True, limit_choices_to={"target": Validator.FORMSET}, on_delete=models.SET_NULL + Validator, + blank=True, + null=True, + limit_choices_to={"target": Validator.FORMSET}, + on_delete=models.SET_NULL, ) advanced = models.JSONField(default=dict, blank=True) + _natural_key = ["name"] + class Meta: verbose_name = "FormSet" verbose_name_plural = "FormSets" @@ -501,7 +489,7 @@ def widget_attrs(self): return self.advanced["smart"]["widget"] def get_formset(self) -> SmartBaseFormSet: - formSet = formset_factory( + form_set = formset_factory( self.get_form(), formset=SmartBaseFormSet, extra=self.extra, @@ -509,13 +497,18 @@ def get_formset(self) -> SmartBaseFormSet: absolute_max=self.max_num, max_num=self.max_num, ) - formSet.fs = self - formSet.required = self.min_num > 0 - return formSet + form_set.fs = self + form_set.required = self.min_num > 0 + return form_set FIELD_KWARGS = { - forms.CharField: {"min_length": None, "max_length": None, "empty_value": "", "initial": None}, + forms.CharField: { + "min_length": None, + "max_length": None, + "empty_value": "", + "initial": None, + }, forms.IntegerField: {"min_value": None, "max_value": None, "initial": None}, forms.DateField: {"initial": None}, fields.LocationField: {}, @@ -569,13 +562,22 @@ class FlexFormField(AdminReverseMixin, NaturalKeyModel, I18NModel, OrderableMode flex_form = models.ForeignKey(FlexForm, on_delete=models.CASCADE, related_name="fields") label = models.CharField(max_length=2000) 
- name = CICharField(max_length=100, blank=True, validators=[RegexValidator("^[a-z_0-9]*$")]) + name = models.CharField( + max_length=100, + blank=True, + validators=[RegexValidator("^[a-z_0-9]*$")], + db_collation="_", + ) field_type = StrategyClassField(registry=field_registry, import_error=import_custom_field) choices = models.CharField(max_length=2000, blank=True, null=True) required = models.BooleanField(default=False) enabled = models.BooleanField(default=True) validator = models.ForeignKey( - Validator, blank=True, null=True, limit_choices_to={"target": Validator.FIELD}, on_delete=models.PROTECT + Validator, + blank=True, + null=True, + limit_choices_to={"target": Validator.FIELD}, + on_delete=models.PROTECT, ) validation = models.TextField(blank=True, null=True) regex = RegexField(blank=True, null=True, validators=[RegexPatternValidator()]) @@ -642,7 +644,6 @@ def get_field_kwargs(self): field_kwargs["required"] = False regex = self.regex - # data = kwargs.pop("data", {}).copy() smart_attrs["data-flex"] = self.name if self.required: smart_attrs["required_by_question"] = "required" @@ -652,18 +653,12 @@ def get_field_kwargs(self): field_kwargs["required"] = True else: smart_attrs["required_by_question"] = "" - # field_kwargs["required"] = False - if not smart_attrs.get("visible", True): - smart_attrs["data-visibility"] = "hidden" - elif smart_attrs.get("question", ""): + if not smart_attrs.get("visible", True) or smart_attrs.get("question", ""): smart_attrs["data-visibility"] = "hidden" field_kwargs.setdefault("smart_attrs", smart_attrs.copy()) field_kwargs.setdefault("label", self.label) - # - # if not smart_attrs.get("question"): - # kwargs.setdefault("required", self.required) field_kwargs.setdefault("validators", get_validators(self)) @@ -675,9 +670,9 @@ def get_field_kwargs(self): field_kwargs["datasource"] = self.advanced["datasource"] if hasattr(field_type, "choices"): - if "choices" in smart_attrs: + if smart_attrs.get("choices"): field_kwargs["choices"] = smart_attrs["choices"] - elif "choices" in self.advanced: # old deprecated + if self.advanced.get("choices"): # old deprecated field_kwargs["choices"] = self.advanced["choices"] elif self.choices: field_kwargs["choices"] = clean_choices(self.choices.split(",")) @@ -703,7 +698,7 @@ def get_instance(self): field_type = self.field_type kwargs = self.get_field_kwargs() kwargs.setdefault("flex_field", self) - tt = type(field_type.__name__, (SmartFieldMixin, field_type), dict()) + tt = type(field_type.__name__, (SmartFieldMixin, field_type), {}) fld = tt(**kwargs) except Exception as e: logger.exception(e) @@ -713,8 +708,6 @@ def get_instance(self): def clean(self): if self.field_type: try: - # dict_setdefault(self.advanced, self.FLEX_FIELD_DEFAULT_ATTRS) - # dict_setdefault(self.advanced, {"kwargs": FIELD_KWARGS.get(self.field_type, {})}) self.get_instance() except Exception as e: logger.exception(e) @@ -752,20 +745,32 @@ def get_from_cache(self, name): class OptionSet(AdminReverseMixin, NaturalKeyModel, models.Model): version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - name = CICharField(max_length=100, unique=True, validators=[RegexValidator("[a-z0-9-_]")]) + name = models.CharField( + max_length=100, + unique=True, + validators=[RegexValidator("[a-z0-9-_]")], + db_collation="_", + ) description = models.CharField(max_length=1000, blank=True, null=True) data = models.TextField(blank=True, null=True) separator = models.CharField(max_length=1, default="", blank=True) comment = 
models.CharField(max_length=1, default="#", blank=True) columns = models.CharField( - max_length=20, default="0,0,-1", blank=True, help_text="column order. Es: 'pk,parent,label' or 'pk,label'" + max_length=20, + default="0,0,-1", + blank=True, + help_text="column order. Es: 'pk,parent,label' or 'pk,label'", ) pk_col = models.IntegerField(default=0, help_text="ID column number") parent_col = models.IntegerField(default=-1, help_text="Column number of the indicating parent element") locale = models.CharField(max_length=5, default="en-us", help_text="default language code") languages = models.CharField( - max_length=255, default="-;-;", blank=True, null=True, help_text="language code of each column." + max_length=255, + default="-;-;", + blank=True, + null=True, + help_text="language code of each column.", ) _natural_key = ["name"] @@ -841,32 +846,32 @@ def as_json(self, language=None): def clean_choices(value): - if not isinstance(value, (list, tuple)): + if not isinstance(value, list | tuple): raise ValueError("choices must be list or tuple") try: return list(dict(value).items()) except ValueError: - return list(zip(map(str.lower, value), value)) + return list(zip(map(str.lower, value), value, strict=True)) class CustomFieldType(AdminReverseMixin, NaturalKeyModel, models.Model): - name = CICharField(max_length=100, unique=True, validators=[RegexValidator("[A-Z][a-zA-Z0-9_]*")]) + name = models.CharField( + max_length=100, + unique=True, + validators=[RegexValidator("[A-Z][a-zA-Z0-9_]*")], + db_collation="_", + ) base_type = StrategyClassField(registry=field_registry, default=forms.CharField) attrs = models.JSONField(default=dict) regex = RegexField(blank=True, null=True) - # choices = models.CharField(max_length=2000, blank=True, null=True) - # required = models.BooleanField(default=False) validator = models.ForeignKey( - Validator, blank=True, null=True, limit_choices_to={"target": Validator.FIELD}, on_delete=models.PROTECT + Validator, + blank=True, + null=True, + limit_choices_to={"target": Validator.FIELD}, + on_delete=models.PROTECT, ) - @staticmethod - def build(name, defaults): - choices = defaults.get("attrs", {}).get("choices", {}) - if choices: - defaults["attrs"]["choices"] = clean_choices(choices) - return CustomFieldType.objects.update_or_create(name=name, defaults=defaults)[0] - def __str__(self): return self.name @@ -876,6 +881,13 @@ def save(self, force_insert=False, force_update=False, using=None, update_fields if fqn(cls) not in field_registry: field_registry.register(cls) + @staticmethod + def build(name, defaults): + choices = defaults.get("attrs", {}).get("choices", {}) + if choices: + defaults["attrs"]["choices"] = clean_choices(choices) + return CustomFieldType.objects.update_or_create(name=name, defaults=defaults)[0] + def clean(self): if not self.base_type: raise ValidationError("base_type is mandatory") @@ -893,4 +905,8 @@ def clean(self): def get_class(self): attrs = self.attrs.copy() attrs["custom"] = self - return type(self.base_type)(underscore_to_camelcase(self.name), (CustomFieldMixin, self.base_type), attrs) + return type(self.base_type)( + underscore_to_camelcase(self.name), + (CustomFieldMixin, self.base_type), + attrs, + ) diff --git a/src/aurora/core/registry.py b/src/aurora/core/registry.py index 8626c714..6345da18 100644 --- a/src/aurora/core/registry.py +++ b/src/aurora/core/registry.py @@ -3,6 +3,7 @@ from django import forms from django.core.exceptions import ObjectDoesNotExist + from simplemathcaptcha.fields import MathCaptchaField from 
strategy_field.exceptions import StrategyAttributeError from strategy_field.registry import Registry @@ -23,13 +24,12 @@ def clean_classname(value): def classloader(value): if not value: return value - elif isinstance(value, str): + if isinstance(value, str): value = clean_classname(value) return import_by_name(value) - elif isclass(value): + if isclass(value): return value - else: - return type(value) + return type(value) def get_custom_field(value): @@ -58,7 +58,10 @@ def get_name(self, entry): def as_choices(self): if not self._choices: - self._choices = sorted([(fqn(klass), self.get_name(klass)) for klass in self], key=lambda e: e[1]) + self._choices = sorted( + [(fqn(klass), self.get_name(klass)) for klass in self], + key=lambda e: e[1], + ) return self._choices def __contains__(self, y): @@ -79,7 +82,6 @@ def __contains__(self, y): field_registry.register(forms.DateTimeField) field_registry.register(forms.DurationField) field_registry.register(forms.EmailField) -# field_registry.register(forms.FileField) field_registry.register(forms.FloatField) field_registry.register(forms.GenericIPAddressField) field_registry.register(forms.ImageField) diff --git a/src/aurora/core/templates/admin/base.html b/src/aurora/core/templates/admin/base.html index bbae6dad..3e1a6680 100644 --- a/src/aurora/core/templates/admin/base.html +++ b/src/aurora/core/templates/admin/base.html @@ -1,6 +1,7 @@ {% extends "admin/base.html" %} {% block extrahead %} + {{ block.super }} ', self.absolute_path(path), version) for path in self._js] - def render_css(self): # To keep rendering order consistent, we can't just iterate over items(). # We need to sort the keys, and iterate over the sorted list. diff --git a/src/aurora/core/views.py b/src/aurora/core/views.py index cf4e6f8c..c3d244af 100644 --- a/src/aurora/core/views.py +++ b/src/aurora/core/views.py @@ -23,7 +23,7 @@ def _filter(record): valid = valid and str(record["parent"]) == str(parent) return valid - data = { + return { "results": [ { "id": record["pk"], @@ -34,7 +34,6 @@ def _filter(record): if _filter(record) ], } - return data # @method_decorator(cache_page(60 * 60), name="dispatch") @@ -71,5 +70,7 @@ def get(self, request, *args, **kwargs): def service_worker(request): - response = HttpResponse(open(settings.PWA_SERVICE_WORKER_PATH).read(), content_type="application/javascript") - return response + return HttpResponse( + open(settings.PWA_SERVICE_WORKER_PATH).read(), + content_type="application/javascript", + ) diff --git a/src/aurora/counters/admin.py b/src/aurora/counters/admin.py index 37408f58..48b5c731 100644 --- a/src/aurora/counters/admin.py +++ b/src/aurora/counters/admin.py @@ -1,12 +1,13 @@ import logging -from admin_extra_buttons.decorators import button -from adminfilters.autocomplete import AutoCompleteFilter from django.contrib.admin import register from django.db.transaction import atomic from django.http import HttpResponseRedirect from django.shortcuts import render from django.urls import reverse + +from admin_extra_buttons.decorators import button +from adminfilters.autocomplete import LinkedAutoCompleteFilter from smart_admin.modeladmin import SmartModelAdmin from ..core.utils import is_root @@ -24,13 +25,27 @@ def get_token(request): @register(Counter) class CounterAdmin(SmartModelAdmin): list_display = ("registration", "day", "records") - list_filter = (("registration", AutoCompleteFilter), "day") + list_filter = ( + ("registration__project__organization", LinkedAutoCompleteFilter.factory()), + ( + "registration__project", + 
LinkedAutoCompleteFilter.factory(parent="registration__project__organization"), + ), + ( + "registration", + LinkedAutoCompleteFilter.factory(parent="registration__project"), + ), + "day", + ) date_hierarchy = "day" autocomplete_fields = ("registration",) change_form_template = None paginator = LargeTablePaginator show_full_result_count = False + def check(self, **kwargs): + return super().check(**kwargs) + def get_exclude(self, request, obj=None): return ("details",) diff --git a/src/aurora/counters/migrations/0001_initial.py b/src/aurora/counters/migrations/0001_initial.py index ad991234..daceda10 100644 --- a/src/aurora/counters/migrations/0001_initial.py +++ b/src/aurora/counters/migrations/0001_initial.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -16,13 +15,24 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Counter", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("day", models.DateField(blank=True, db_index=True, null=True)), ("records", models.IntegerField(blank=True, default=0, null=True)), ("details", models.JSONField(blank=True, default=dict)), ( "registration", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="registration.registration"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="registration.registration", + ), ), ], options={ diff --git a/src/aurora/counters/migrations/0002_alter_counter_registration.py b/src/aurora/counters/migrations/0002_alter_counter_registration.py index 7e9491d6..dcff71c8 100644 --- a/src/aurora/counters/migrations/0002_alter_counter_registration.py +++ b/src/aurora/counters/migrations/0002_alter_counter_registration.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0037_auto_20220819_0950"), ("counters", "0001_initial"), @@ -16,7 +15,9 @@ class Migration(migrations.Migration): model_name="counter", name="registration", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="counters", to="registration.registration" + on_delete=django.db.models.deletion.CASCADE, + related_name="counters", + to="registration.registration", ), ), ] diff --git a/src/aurora/counters/templates/counters/chart_base.html b/src/aurora/counters/templates/counters/chart_base.html index bd05276b..260442bf 100644 --- a/src/aurora/counters/templates/counters/chart_base.html +++ b/src/aurora/counters/templates/counters/chart_base.html @@ -11,6 +11,11 @@ {% endblock %} {% block body %} +

+ {{ registration.project.organization }} /
+ {{ registration.project }} /
+ {{ registration }}
{% block info %}
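The CounterAdmin hunk above replaces the single `("registration", AutoCompleteFilter)` entry with a parent/child chain of `LinkedAutoCompleteFilter` instances, so the organization, project and registration dropdowns narrow each other down. The sketch below is not part of this patch; it only illustrates the same pattern on a hypothetical `RecordAdmin`, assuming the django-adminfilters `factory()` / `parent=` API exactly as it is used in that hunk.

    # Hypothetical admin, for illustration only -- not in this patch.
    from adminfilters.autocomplete import LinkedAutoCompleteFilter
    from django.contrib.admin import ModelAdmin, register

    from .models import Record  # hypothetical model with a FK named "registration"


    @register(Record)
    class RecordAdmin(ModelAdmin):
        list_filter = (
            # root of the chain: plain autocomplete on the organization
            ("registration__project__organization", LinkedAutoCompleteFilter.factory()),
            # each child filter is linked to its parent filter's current selection
            (
                "registration__project",
                LinkedAutoCompleteFilter.factory(parent="registration__project__organization"),
            ),
            (
                "registration",
                LinkedAutoCompleteFilter.factory(parent="registration__project"),
            ),
        )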
diff --git a/src/aurora/counters/templates/counters/chart_month.html b/src/aurora/counters/templates/counters/chart_month.html index 08e5d474..1b0071eb 100644 --- a/src/aurora/counters/templates/counters/chart_month.html +++ b/src/aurora/counters/templates/counters/chart_month.html @@ -1,6 +1,6 @@ {% extends "counters/chart_base.html" %}{% load static %} {% block data %} {% endblock %} diff --git a/src/aurora/counters/templates/counters/index.html b/src/aurora/counters/templates/counters/index.html index e8d12938..efa64df5 100644 --- a/src/aurora/counters/templates/counters/index.html +++ b/src/aurora/counters/templates/counters/index.html @@ -3,10 +3,10 @@
-   Welcome {{ user }}
+   {{ organization }}
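With the hunk above, counters/index.html no longer greets the user but shows the organization the counters index is scoped to. The context it relies on is built by the reworked `index()` view further down in this patch (counters/views.py). The snippet below is a condensed restatement of that contract for reading convenience only; the helper name is hypothetical and the logic mirrors the views.py hunk later in this diff.

    # Not part of the patch: mirrors counters.views.index() from the views.py hunk below.
    from django.core.exceptions import PermissionDenied

    from aurora.core.models import Organization


    def build_counters_index_context(request, org_slug):  # hypothetical helper name
        organization = Organization.objects.get(slug=org_slug)
        # object-level permission check, as in the view
        if not request.user.has_perm("counters.view_counter", organization):
            raise PermissionDenied
        return {
            "organization": organization,
            # only the projects the current user is a member of
            "projects": organization.projects.filter(members__user=request.user),
        }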
diff --git a/src/aurora/counters/templates/counters/project.html b/src/aurora/counters/templates/counters/project.html new file mode 100644 index 00000000..3ce128a0 --- /dev/null +++ b/src/aurora/counters/templates/counters/project.html @@ -0,0 +1,16 @@ +{% extends "base.html" %}{% load static %} +{% block body %} + +
+ {{ project.organization }} / {{ project }}
+ {% for reg in registrations.all %}
+   • {{ reg }}
+ {% endfor %}
+ +{% endblock body %} diff --git a/src/aurora/counters/urls.py b/src/aurora/counters/urls.py index 3e8dec65..82a36177 100644 --- a/src/aurora/counters/urls.py +++ b/src/aurora/counters/urls.py @@ -1,11 +1,20 @@ from django.urls import path -from .views import MonthlyChartView, MonthlyDataView, index +from .views import MonthlyChartView, MonthlyDataView, index, project_index app_name = "charts" urlpatterns = [ - path("", index, name="index"), - path("/monthly/", MonthlyChartView.as_view(), name="registration"), - path("data//monthly/", MonthlyDataView.as_view(), name="monthly_data"), + path("/", index, name="index"), + path("//", project_index, name="project-index"), + path( + "///monthly/", + MonthlyChartView.as_view(), + name="registration", + ), + path( + "//data//monthly/", + MonthlyDataView.as_view(), + name="monthly_data", + ), ] diff --git a/src/aurora/counters/views.py b/src/aurora/counters/views.py index 390af88d..a7967334 100644 --- a/src/aurora/counters/views.py +++ b/src/aurora/counters/views.py @@ -1,5 +1,6 @@ from datetime import datetime +from django.contrib.auth import get_user_model from django.contrib.auth.decorators import login_required from django.contrib.auth.mixins import UserPassesTestMixin from django.core.exceptions import PermissionDenied @@ -8,18 +9,39 @@ from django.utils import timezone from django.views import View +from aurora.core.models import Organization, Project from aurora.core.utils import get_session_id, last_day_of_month, render from aurora.counters.models import Counter from aurora.registration.models import Registration +User = get_user_model() + @login_required() -def index(request): - regs = Registration.objects.filter(roles__user=request.user, roles__role__permissions__codename="view_counter") - context = {"registrations": regs} +def index(request, org): + o: Organization = Organization.objects.get(slug=org) + if not request.user.has_perm("counters.view_counter", o): + raise PermissionDenied("----") + context = { + "organization": o, + "projects": o.projects.filter(members__user=request.user), + } return render(request, "counters/index.html", context) +@login_required() +def project_index(request, org, prj): + o: Organization = Organization.objects.get(slug=org) + p: Project = Project.objects.get(organization=o, pk=prj) + if not request.user.has_perm("counters.view_counter", p): + raise PermissionDenied("----") + context = { + "project": p, + "registrations": p.registrations.filter(members__user=request.user), + } + return render(request, "counters/project.html", context) + + class ChartView(UserPassesTestMixin, View): permission_denied_message = "----" login_url = "/login/" @@ -27,9 +49,9 @@ class ChartView(UserPassesTestMixin, View): def test_func(self): return self.request.user.is_authenticated - def get_registration(self, request, pk) -> Registration: - reg = get_object_or_404(Registration, id=pk) - if not request.user.has_perm("view_counter", reg): + def get_registration(self, request, org, prj, reg_pk) -> Registration: + reg = get_object_or_404(Registration, project__organization__slug=org, project_id=prj, id=reg_pk) + if not request.user.has_perm("counters.view_counter", reg): raise PermissionDenied("----") return reg @@ -38,8 +60,8 @@ def handle_no_permission(self): class MonthlyDataView(ChartView): - def get(self, request, registration_id): - registration = self.get_registration(request, registration_id) + def get(self, request, org, prj, registration_id): + registration = self.get_registration(request, org, prj, registration_id) qs = 
Counter.objects.filter(registration_id=registration_id).order_by("day") param_month = request.GET.get("m", None) total = 0 @@ -62,7 +84,7 @@ def get(self, request, registration_id): total += record.records if not labels: - labels = [d.strftime("%-d, %a") for d in values.keys()] + labels = [d.strftime("%-d, %a") for d in values] period = date.strftime("%B %Y") data = { "datapoints": qs.all().count(), @@ -72,11 +94,7 @@ def get(self, request, registration_id): "labels": labels, "data": list(values.values()), } - response = JsonResponse(data) - # response["Cache-Control"] = "max-age=315360000" - # response["Last-Modified"] = "max-age=315360000" - # response["ETag"] = etag - return response + return JsonResponse(data) def daily_data(request, registration, record): @@ -84,8 +102,8 @@ def daily_data(request, registration, record): class MonthlyChartView(ChartView): - def get(self, request, registration): - reg: Registration = self.get_registration(request, registration) + def get(self, request, org, prj, registration): + reg: Registration = self.get_registration(request, org, prj, registration) first: [Counter] = reg.counters.first() latest: [Counter] = reg.counters.last() if not latest: diff --git a/src/aurora/ddt_panels.py b/src/aurora/ddt_panels.py index fe3d51ae..e932a95e 100644 --- a/src/aurora/ddt_panels.py +++ b/src/aurora/ddt_panels.py @@ -1,9 +1,10 @@ import io -from debug_toolbar.panels import Panel from django.core.management import call_command from django.template import Context, Template -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ + +from debug_toolbar.panels import Panel from aurora.state import state diff --git a/src/aurora/flatpages/admin.py b/src/aurora/flatpages/admin.py index a462c7a8..7316d65b 100644 --- a/src/aurora/flatpages/admin.py +++ b/src/aurora/flatpages/admin.py @@ -1,7 +1,8 @@ -from admin_extra_buttons.decorators import button, view -from admin_sync.mixin import SyncMixin from django.conf import settings from django.shortcuts import render + +from admin_extra_buttons.decorators import button, view +from admin_sync.mixin import SyncMixin from smart_admin.modeladmin import SmartModelAdmin from .forms import FlatPageForm @@ -21,17 +22,6 @@ class FlatPageAdmin(SyncMixin, SmartModelAdmin): filter_horizontal = ("sites",) search_fields = ("url", "title") save_on_top = True - # fieldsets = ( - # (None, {'fields': ('url', 'title', 'content', 'sites')}), - # (_('Advanced options'), { - # 'classes': ('collapse',), - # 'fields': ( - # 'enable_comments', - # 'registration_required', - # 'template_name', - # ), - # }), - # ) def get_changeform_initial_data(self, request): initial = super().get_changeform_initial_data(request) diff --git a/src/aurora/flatpages/apps.py b/src/aurora/flatpages/apps.py index 6aeb5ad9..d7b4e34d 100644 --- a/src/aurora/flatpages/apps.py +++ b/src/aurora/flatpages/apps.py @@ -1,6 +1,7 @@ from django.apps import AppConfig from django.urls import NoReverseMatch, get_script_prefix, reverse from django.utils.encoding import iri_to_uri + from smart_admin.decorators import smart_register diff --git a/src/aurora/flatpages/forms.py b/src/aurora/flatpages/forms.py index 68b1397d..9d852292 100644 --- a/src/aurora/flatpages/forms.py +++ b/src/aurora/flatpages/forms.py @@ -7,6 +7,7 @@ from django.template.context_processors import static from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ + from tinymce.widgets import AdminTinyMCE @@ -57,10 +58,22 @@ class 
FlatPageForm(forms.ModelForm): # toolbar_sticky_offset: isSmallScreen ? 102: 108, "block_formats": "Paragraph=p; Header 1=h1; Header 2=h2; Header 3=h3; Header 4=h4", "formats": { - "h1": {"block": "h1", "attributes": {"class": "text-5xl leading-1"}}, - "h2": {"block": "h2", "attributes": {"class": "text-4xl leading-1"}}, - "h3": {"block": "h3", "attributes": {"class": "text-3xl leading-1"}}, - "h4": {"block": "h4", "attributes": {"class": "text-2xl leading-1"}}, + "h1": { + "block": "h1", + "attributes": {"class": "text-5xl leading-1"}, + }, + "h2": { + "block": "h2", + "attributes": {"class": "text-4xl leading-1"}, + }, + "h3": { + "block": "h3", + "attributes": {"class": "text-3xl leading-1"}, + }, + "h4": { + "block": "h4", + "attributes": {"class": "text-2xl leading-1"}, + }, }, }, ), @@ -76,7 +89,7 @@ class FlatPageForm(forms.ModelForm): help_text=_("Example: “/about/contact/”. Leading and trailing slashes will be added."), error_messages={ "invalid": _( - "This value must contain only letters, numbers, dots, " "underscores, dashes, slashes or tildes." + "This value must contain only letters, numbers, dots, underscores, dashes, slashes or tildes." ), }, ) @@ -84,7 +97,6 @@ class FlatPageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["content"].widget.mce_attrs["content_css"] = [static("bob/mailing.css")] - # self.fields["html"].widget.mce_attrs["document_base_url"] = get_server_url() class Media: js = ("flatpages/tinymce_init.js",) @@ -122,5 +134,4 @@ def clean(self): code="duplicate_url", params={"url": url, "site": site}, ) - # self.cleaned_data["url"] = url return super().clean() diff --git a/src/aurora/flatpages/views.py b/src/aurora/flatpages/views.py index b8b0b646..fd2305d7 100644 --- a/src/aurora/flatpages/views.py +++ b/src/aurora/flatpages/views.py @@ -4,7 +4,6 @@ from django.http import Http404, HttpResponse, HttpResponsePermanentRedirect from django.shortcuts import get_object_or_404 from django.template import loader -from django.utils.safestring import mark_safe from django.views.decorators.csrf import csrf_protect DEFAULT_TEMPLATE = "flatpages/default.html" @@ -29,18 +28,15 @@ def flatpage(request, url): except Http404: if not url.endswith("/") and settings.APPEND_SLASH: url += "/" - f = get_object_or_404(FlatPage, url=url, sites=site_id) + get_object_or_404(FlatPage, url=url, sites=site_id) return HttpResponsePermanentRedirect("%s/" % request.path) - else: - raise + raise return render_flatpage(request, f) @csrf_protect def render_flatpage(request, f): - """ - Internal interface to the flat page view. - """ + """Return an internal interface to the flat page view.""" # If registration is required for accessing this page, and the user isn't # logged in, redirect to the login page. if f.registration_required and not request.user.is_authenticated: @@ -55,7 +51,5 @@ def render_flatpage(request, f): # To avoid having to always use the "|safe" filter in flatpage templates, # mark the title and content as already safe (since they are raw HTML # content in the first place). 
- f.title = mark_safe(f.title) - f.content = mark_safe(f.content) return HttpResponse(template.render({"flatpage": f}, request)) diff --git a/src/aurora/i18n/admin.py b/src/aurora/i18n/admin.py index 71b32b6c..612bfd22 100644 --- a/src/aurora/i18n/admin.py +++ b/src/aurora/i18n/admin.py @@ -5,10 +5,6 @@ from unittest.mock import Mock from urllib.parse import unquote -from admin_extra_buttons.decorators import button, view -from adminfilters.combo import ChoicesFieldComboFilter -from adminfilters.querystring import QueryStringFilter -from dateutil.utils import today from django.conf import settings from django.contrib import messages from django.contrib.admin import register @@ -20,6 +16,11 @@ from django.urls import reverse from django.utils import translation from django.utils.translation import get_language + +from admin_extra_buttons.decorators import button, view +from adminfilters.combo import ChoicesFieldComboFilter +from adminfilters.querystring import QueryStringFilter +from dateutil.utils import today from smart_admin.modeladmin import SmartModelAdmin from ..core.admin_sync import SyncMixin @@ -94,7 +95,10 @@ def import_translations(self, request): if form.is_valid() and opts_form.is_valid(): csv_file = form.cleaned_data["csv_file"] if csv_file.multiple_chunks(): - self.message_user(request, "Uploaded file is too big (%.2f MB)" % (csv_file.size / 1000)) + self.message_user( + request, + "Uploaded file is too big (%.2f MB)" % (csv_file.size / 1000), + ) else: ctx["language_code"] = form.cleaned_data["locale"] ctx["language"] = dict(form.fields["locale"].choices)[ctx["language_code"]] @@ -143,7 +147,9 @@ def import_translations(self, request): selected += 1 info = row[1] __, c = Message.objects.update_or_create( - locale=lang, msgid=info["msgid"], defaults={"msgstr": info["msgstr"]} + locale=lang, + msgid=info["msgid"], + defaults={"msgstr": info["msgstr"]}, ) ids.append(str(__.pk)) if c: @@ -206,16 +212,11 @@ def check_orphans(self, request): translation.activate(locale) state.collect_messages = False state.hit_messages = False - # return render(request, "admin/i18n/message/check_orphans.html", ctx) else: form = LanguageForm() ctx["form"] = form return render(request, "admin/i18n/message/check_orphans.html", ctx) - # @link() - # def translate(self, button): - # return button - @view() def get_or_create(self, request): if request.method == "POST": @@ -262,7 +263,10 @@ def create_translation_single(self, request, pk): msg, created = Message.objects.get_or_create( msgid=original.msgid, locale=locale, - defaults={"md5": Message.get_md5(locale, original.msgid), "draft": True}, + defaults={ + "md5": Message.get_md5(locale, original.msgid), + "draft": True, + }, ) if created: self.message_user(request, "Message created.") @@ -273,8 +277,7 @@ def create_translation_single(self, request, pk): logger.exception(e) self.message_error_to_user(request, e) return HttpResponseRedirect(reverse("admin:i18n_message_change", args=[msg.pk])) - else: - ctx["form"] = form + ctx["form"] = form else: form = LanguageForm() ctx["form"] = form @@ -297,7 +300,10 @@ def create_translation(self, request): Message.objects.get_or_create( msgid=msg.msgid, locale=locale, - defaults={"md5": Message.get_md5(locale, msg.msgid), "draft": True}, + defaults={ + "md5": Message.get_md5(locale, msg.msgid), + "draft": True, + }, ) except Exception as e: logger.exception(e) @@ -305,7 +311,10 @@ def create_translation(self, request): updated = Message.objects.filter(locale=locale).count() added = 
Message.objects.filter(locale=locale, draft=True, timestamp__date=today()) - self.message_user(request, f"{updated - existing} messages created. {updated} available") + self.message_user( + request, + f"{updated - existing} messages created. {updated} available", + ) ctx["locale"] = locale ctx["added"] = added else: diff --git a/src/aurora/i18n/engine.py b/src/aurora/i18n/engine.py index 32412041..08bc97c9 100644 --- a/src/aurora/i18n/engine.py +++ b/src/aurora/i18n/engine.py @@ -2,6 +2,7 @@ from django.core.cache import caches from django.utils import timezone + from django_redis import get_redis_connection from ..state import state @@ -65,7 +66,7 @@ def __init__(self): self.locales = {} def reset(self): - for __, locale in self.locales.items(): + for locale in self.locales.values(): locale.reset() def activate(self, locale): diff --git a/src/aurora/i18n/fields.py b/src/aurora/i18n/fields.py index 6a733f45..a4f1866d 100644 --- a/src/aurora/i18n/fields.py +++ b/src/aurora/i18n/fields.py @@ -4,9 +4,7 @@ class LanguageField(models.CharField): - """ - A language field for Django models. - """ + """A language field for Django models.""" def __init__(self, *args, **kwargs): kwargs.setdefault("verbose_name", _("Language")) diff --git a/src/aurora/i18n/gettext.py b/src/aurora/i18n/get_text.py similarity index 81% rename from src/aurora/i18n/gettext.py rename to src/aurora/i18n/get_text.py index 419828fe..054570d6 100644 --- a/src/aurora/i18n/gettext.py +++ b/src/aurora/i18n/get_text.py @@ -1,13 +1,13 @@ from django.conf import settings -from django.utils.safestring import SafeData, mark_safe from django.utils.translation.trans_real import _active, _default, translation # noqa from .engine import translator def gettext(message): - """ - Translate the 'message' string. It uses the current thread to find the + """Translate the 'message' string. + + It uses the current thread to find the translation object to use. If no current translation is activated, the message will be run through the default translation object. """ @@ -26,7 +26,4 @@ def gettext(message): # is given, instead of metadata, which is the default gettext behavior. 
result = type(message)("") - if isinstance(message, SafeData): - return mark_safe(result) - return result diff --git a/src/aurora/i18n/handlers.py b/src/aurora/i18n/handlers.py index 837adbee..85bf2543 100644 --- a/src/aurora/i18n/handlers.py +++ b/src/aurora/i18n/handlers.py @@ -9,7 +9,8 @@ def update_cache(sender, instance, **kwargs): tznow = timezone.now() - serial = "{:%d-%m-%Y:%H:%M:%S}.{:03d}".format(tznow, tznow.microsecond // 1000) + msconds = tznow.microsecond // 1000 + serial = f"{tznow:%d-%m-%Y:%H:%M:%S}.{msconds:03d}" cache.set("i18n", serial) diff --git a/src/aurora/i18n/hreflang/__init__.py b/src/aurora/i18n/hreflang/__init__.py index c1388aea..c74c4189 100644 --- a/src/aurora/i18n/hreflang/__init__.py +++ b/src/aurora/i18n/hreflang/__init__.py @@ -2,11 +2,12 @@ from django.conf import settings -from .functions import get_hreflang_info, language_codes, languages, reverse -from .header import AddHreflangToResponse, hreflang_headers +from .functions import get_hreflang_info, language_codes, languages, reverse # noqa +from .header import AddHreflangToResponse, hreflang_headers # noqa -if not getattr(settings, "DISABLE_LOCALE_MIDDLEWARE_CHECK", False): - if not any("LocaleMiddleware" in mw for mw in settings.MIDDLEWARE): - warning("LocaleMiddleware is not turned on, hreflang (and i18n generally) may experience problems.") +if not getattr(settings, "DISABLE_LOCALE_MIDDLEWARE_CHECK", False) and not any( + "LocaleMiddleware" in mw for mw in settings.MIDDLEWARE +): + warning("LocaleMiddleware is not turned on, hreflang (and i18n generally) may experience problems.") # default __all__ is fine diff --git a/src/aurora/i18n/hreflang/functions.py b/src/aurora/i18n/hreflang/functions.py index 87adee51..7c58d2bf 100644 --- a/src/aurora/i18n/hreflang/functions.py +++ b/src/aurora/i18n/hreflang/functions.py @@ -9,7 +9,7 @@ def reverse(view_name, lang=None, use_lang_prefix=True, *args, **kwargs): """ - Similar to django.core.urlresolvers.reverse except for the parameters: + Similar to django.core.urlresolvers.reverse except for the parameters. :param lang: Language code in which the url is to be translated (ignored if use_lang_prefix is False). :param use_lang_prefix: If changed to False, get an url without language prefix. @@ -29,8 +29,8 @@ def reverse(view_name, lang=None, use_lang_prefix=True, *args, **kwargs): deactivate() url = lang_implied_reverse(view_name, args=args, kwargs=kwargs) if not use_lang_prefix: - if not url.startswith("/{0}".format(settings.LANGUAGE_CODE)): - raise NoReverseMatch('could not find reverse match for "{}" with language "{}"'.format(view_name, lang)) + if not url.startswith(f"/{settings.LANGUAGE_CODE}"): + raise NoReverseMatch(f'could not find reverse match for "{view_name}" with language "{lang}"') url = url[1 + len(settings.LANGUAGE_CODE):] # fmt: skip activate(cur_language) return url @@ -38,32 +38,45 @@ def reverse(view_name, lang=None, use_lang_prefix=True, *args, **kwargs): def get_hreflang_info(path, default=True): """ + Return a list of (code, url) tuples for all language versions. + :param path: Current path (request.path). :param default: Include the default landing page (x-default without language code). - :return: A list of (code, url) tuples for all language versions. 
""" reverse_match = resolve(path) info = [] if default: - info.append(("x-default", reverse(reverse_match.view_name, use_lang_prefix=False, kwargs=reverse_match.kwargs))) - for lang in language_codes(): info.append( - (lang, reverse(reverse_match.view_name, lang=lang, use_lang_prefix=True, kwargs=reverse_match.kwargs)) + ( + "x-default", + reverse( + reverse_match.view_name, + use_lang_prefix=False, + kwargs=reverse_match.kwargs, + ), + ) + ) + return [ + ( + lang, + reverse( + reverse_match.view_name, + lang=lang, + use_lang_prefix=True, + kwargs=reverse_match.kwargs, + ), ) - return info + for lang in language_codes() + ] -@lru_cache() +@lru_cache def languages(): - """ - Get language and regionale codes and names of all languages that are supported as a dictionary. - """ - return {key: name for key, name in settings.LANGUAGES} + """Get language and regionale codes and names of all languages that are supported as a dictionary.""" + return dict(settings.LANGUAGES) -@lru_cache() +@lru_cache def language_codes(): - """ - Get language with regionale codes of all languages that are supported. - """ + """Get language with regionale codes of all languages that are supported.""" return languages().keys() diff --git a/src/aurora/i18n/hreflang/header.py b/src/aurora/i18n/hreflang/header.py index d0521a7b..9e615bf0 100644 --- a/src/aurora/i18n/hreflang/header.py +++ b/src/aurora/i18n/hreflang/header.py @@ -1,8 +1,9 @@ """ -Add hreflang response headers as specified by Google +Add hreflang response headers as specified by Google. https://support.google.com/webmasters/answer/189077?hl=en """ + from django.utils.deprecation import MiddlewareMixin from .functions import get_hreflang_info @@ -10,27 +11,26 @@ def hreflang_headers(response, request=None, path=None): """ - Adds hreflang headers to a HttpResponse object + Add hreflang headers to a HttpResponse object. :param response: the HttpResponse to add headers to :param path: the current path for which to add alternate language versions :param request: the request, which is used to find path (ignored if path is set directly) :return: response is modified and returned """ - assert request or path, "hreflang_headers needs the current url, please either provide request or a path" + if not (request or path): + raise Exception("hreflang_headers needs the current url, please either provide request or a path") links = [] hreflang_info = get_hreflang_info(path or request.path) for lang, url in hreflang_info: - links.append('<{1}>; rel="alternate"; hreflang="{0}"'.format(lang, url)) - response["Link"] = "{0},".format(response["Link"]) if "Link" in response else "" + links.append(f'<{url}>; rel="alternate"; hreflang="{lang}"') + response["Link"] = f"{response['Link']}," if "Link" in response else "" response["Link"] += ",".join(links) return response class AddHreflangToResponse(MiddlewareMixin): - """ - A middleware that applies hreflang_headers to all responses (adding hreflang headers). 
- """ + """Applies hreflang_headers to all responses (adding hreflang headers).""" def process_response(self, request, response): return hreflang_headers(response, request=request) diff --git a/src/aurora/i18n/migrations/0001_initial.py b/src/aurora/i18n/migrations/0001_initial.py index 2d238ed0..04ec8286 100644 --- a/src/aurora/i18n/migrations/0001_initial.py +++ b/src/aurora/i18n/migrations/0001_initial.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] @@ -15,11 +14,23 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Message", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ( "locale", aurora.i18n.fields.LanguageField( - choices=[("en-us", "English"), ("pl-pl", "Polskie"), ("uk-ua", "український")], + choices=[ + ("en-us", "English"), + ("pl-pl", "Polskie"), + ("uk-ua", "український"), + ], default="en-us", max_length=10, null=True, @@ -28,7 +39,10 @@ class Migration(migrations.Migration): ), ("msgid", models.TextField()), ("msgstr", models.TextField(blank=True, null=True)), - ("md5", models.CharField(db_index=True, max_length=512, verbose_name="MD5")), + ( + "md5", + models.CharField(db_index=True, max_length=512, verbose_name="MD5"), + ), ], ), ] diff --git a/src/aurora/i18n/migrations/0002_auto_20220328_0811.py b/src/aurora/i18n/migrations/0002_auto_20220328_0811.py index 4ab3c0a5..73ba2e94 100644 --- a/src/aurora/i18n/migrations/0002_auto_20220328_0811.py +++ b/src/aurora/i18n/migrations/0002_auto_20220328_0811.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0001_initial"), ] @@ -23,7 +22,11 @@ class Migration(migrations.Migration): model_name="message", name="locale", field=aurora.i18n.fields.LanguageField( - choices=[("uk-ua", "український"), ("en-us", "English"), ("pl-pl", "Polskie")], + choices=[ + ("uk-ua", "український"), + ("en-us", "English"), + ("pl-pl", "Polskie"), + ], default="en-us", max_length=10, null=True, diff --git a/src/aurora/i18n/migrations/0004_auto_20220412_1242.py b/src/aurora/i18n/migrations/0004_auto_20220412_1242.py index d7eae418..17601a0e 100644 --- a/src/aurora/i18n/migrations/0004_auto_20220412_1242.py +++ b/src/aurora/i18n/migrations/0004_auto_20220412_1242.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0003_auto_20220412_1223"), ] diff --git a/src/aurora/i18n/migrations/0005_auto_20220421_0941.py b/src/aurora/i18n/migrations/0005_auto_20220421_0941.py index 40cb59cf..f5688ee6 100644 --- a/src/aurora/i18n/migrations/0005_auto_20220421_0941.py +++ b/src/aurora/i18n/migrations/0005_auto_20220421_0941.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0004_auto_20220412_1242"), ] diff --git a/src/aurora/i18n/migrations/0006_alter_message_md5.py b/src/aurora/i18n/migrations/0006_alter_message_md5.py index 0fe7f155..dedede39 100644 --- a/src/aurora/i18n/migrations/0006_alter_message_md5.py +++ b/src/aurora/i18n/migrations/0006_alter_message_md5.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0005_auto_20220421_0941"), ] diff --git a/src/aurora/i18n/migrations/0007_message_msgcode.py b/src/aurora/i18n/migrations/0007_message_msgcode.py index f481d2dd..3bb6480f 100644 --- a/src/aurora/i18n/migrations/0007_message_msgcode.py +++ 
b/src/aurora/i18n/migrations/0007_message_msgcode.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0006_alter_message_md5"), ] diff --git a/src/aurora/i18n/migrations/0008_auto_20220429_2218.py b/src/aurora/i18n/migrations/0008_auto_20220429_2218.py index c1fecdca..de707570 100644 --- a/src/aurora/i18n/migrations/0008_auto_20220429_2218.py +++ b/src/aurora/i18n/migrations/0008_auto_20220429_2218.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0007_message_msgcode"), ] diff --git a/src/aurora/i18n/migrations/0009_alter_message_locale.py b/src/aurora/i18n/migrations/0009_alter_message_locale.py index 2d79dfdd..aa083fc8 100644 --- a/src/aurora/i18n/migrations/0009_alter_message_locale.py +++ b/src/aurora/i18n/migrations/0009_alter_message_locale.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0008_auto_20220429_2218"), ] diff --git a/src/aurora/i18n/migrations/0010_alter_message_locale.py b/src/aurora/i18n/migrations/0010_alter_message_locale.py index 650426de..ff176be7 100644 --- a/src/aurora/i18n/migrations/0010_alter_message_locale.py +++ b/src/aurora/i18n/migrations/0010_alter_message_locale.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0009_alter_message_locale"), ] diff --git a/src/aurora/i18n/migrations/0011_alter_message_locale.py b/src/aurora/i18n/migrations/0011_alter_message_locale.py index df2a9fad..817c34f6 100644 --- a/src/aurora/i18n/migrations/0011_alter_message_locale.py +++ b/src/aurora/i18n/migrations/0011_alter_message_locale.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0010_alter_message_locale"), ] diff --git a/src/aurora/i18n/migrations/0012_alter_message_locale.py b/src/aurora/i18n/migrations/0012_alter_message_locale.py index 7ec09206..d45c8cbe 100644 --- a/src/aurora/i18n/migrations/0012_alter_message_locale.py +++ b/src/aurora/i18n/migrations/0012_alter_message_locale.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("i18n", "0011_alter_message_locale"), ] diff --git a/src/aurora/i18n/models.py b/src/aurora/i18n/models.py index c67a7b83..86b02ce9 100644 --- a/src/aurora/i18n/models.py +++ b/src/aurora/i18n/models.py @@ -3,6 +3,7 @@ from django.db import models from django.template.defaultfilters import truncatechars from django.utils.translation import gettext_lazy as _ + from natural_keys import NaturalKeyModel from .fields import LanguageField diff --git a/src/aurora/i18n/templatetags/hreflang.py b/src/aurora/i18n/templatetags/hreflang.py index e97468b2..adf8ee1c 100644 --- a/src/aurora/i18n/templatetags/hreflang.py +++ b/src/aurora/i18n/templatetags/hreflang.py @@ -1,5 +1,5 @@ """ -Create hreflang tags as specified by Google +Create hreflang tags as specified by Google. https://support.google.com/webmasters/answer/189077?hl=en """ @@ -7,7 +7,6 @@ from django import template from django.urls import NoReverseMatch from django.urls.base import resolve -from django.utils.safestring import mark_safe from django.utils.translation import get_language from aurora.core.utils import cache_aware_url @@ -24,7 +23,8 @@ def translate_url(context, lang, view_name=None, *args, **kwargs): @param lang: Which language should the url be translated to. @param view_name: Which view to get url from, current if not set. 
""" - assert "request" in context, "translate_url needs request context" + if "request" not in context: + raise Exception("translate_url needs request context") try: kwargs["lang"] = lang if view_name is None: @@ -39,15 +39,14 @@ def translate_url(context, lang, view_name=None, *args, **kwargs): @register.simple_tag(takes_context=True) def hreflang_tags(context, indent=0): - """ - Create all hreflang tags (which includes the current document as per the standard). - """ - assert "request" in context, "hreflang_tags needs request context" + """Create all hreflang tags (which includes the current document as per the standard).""" + if "request" not in context: + raise Exception("hreflang_tags needs request context") hreflang_info = get_hreflang_info(context["request"].path) hreflang_html = [] for lang, url in hreflang_info: - hreflang_html.append('\n'.format(lang, url)) - return mark_safe(("\t" * indent).join(hreflang_html)) + hreflang_html.append(f'\n') + return ("\t" * indent).join(hreflang_html) def _make_list_html(path, incl_current): @@ -55,11 +54,9 @@ def _make_list_html(path, incl_current): hreflang_html = "" for lang, url in hreflang_info: if lang == get_language() and incl_current: - hreflang_html += '
<li class="hreflang_current_language">{0}</li>\n'.format( - languages()[lang] - ) + hreflang_html += f'<li class="hreflang_current_language">{languages()[lang]}</li>\n' else: - hreflang_html += '<li><a href="{0}">{1}</a></li>\n'.format(url, languages()[lang]) + hreflang_html += f'<li><a href="{url}">{languages()[lang]}</a></li>\n' return hreflang_html @@ -67,16 +64,17 @@ def _make_list_html(path, incl_current): def lang_list(context): """ HTML list items with links to each language version of this document. + The current document is included without link and with a special .hreflang_current_language class. """ - assert "request" in context, "lang_list needs request context" + if "request" not in context: + raise Exception("lang_list needs request context") return _make_list_html(context["request"].path, incl_current=True) @register.simple_tag(takes_context=True) def other_lang_list(context): - """ - Like lang_list, but the current language is excluded. - """ - assert "request" in context, "other_lang_list needs request context" + """Like lang_list, but the current language is excluded.""" + if "request" not in context: + raise Exception("other_lang_list needs request context") return _make_list_html(context["request"].path, incl_current=False) diff --git a/src/aurora/i18n/templatetags/itrans.py b/src/aurora/i18n/templatetags/itrans.py index cd602dc0..d4129ce9 100644 --- a/src/aurora/i18n/templatetags/itrans.py +++ b/src/aurora/i18n/templatetags/itrans.py @@ -5,7 +5,6 @@ from django.template.defaulttags import token_kwargs from django.templatetags.static import static from django.utils import translation -from django.utils.safestring import mark_safe from django.utils.translation import get_language from ..engine import translator @@ -33,9 +32,6 @@ def render(self, context): # Restore percent signs. Percent signs in template text are doubled # so they are not interpreted as string format flags. - # is_safe = isinstance(value, SafeData) - # value1 = value.replace("%%", "%") - # value = mark_safe(value1) if is_safe else value1 current_locale = get_language() if self.filter_expression.var.literal: msgid = self.filter_expression.var.literal @@ -45,11 +41,10 @@ def render(self, context): value = translator[current_locale][msgid] if self.asvar: - context[self.asvar] = value + context[self.asvar] = str(value) context[f"{self.asvar}_msgid"] = msgid return "" - else: - return value + return str(value) class BlockTranslateNode(Node): @@ -77,17 +72,17 @@ def __init__( def render_token_list(self, tokens): result = [] - vars = [] + variables = [] for token in tokens: if token.token_type == TokenType.TEXT: result.append(token.contents.replace("%", "%%")) elif token.token_type == TokenType.VAR: result.append("%%(%s)s" % token.contents) - vars.append(token.contents) + variables.append(token.contents) msg = "".join(result) if self.trimmed: msg = translation.trim_whitespace(msg) - return msg, vars + return msg, variables def render(self, context, nested=False): if self.message_context: @@ -97,10 +92,10 @@ def render(self, context, nested=False): # Update() works like a push(), so corresponding context.pop() is at # the end of function context.update({var: val.resolve(context) for var, val in self.extra_context.items()}) - singular, vars = self.render_token_list(self.singular) + singular, variables = self.render_token_list(self.singular) if self.plural and self.countervar and self.counter: count = self.counter.resolve(context) - if not isinstance(count, (Decimal, float, int)): + if not isinstance(count, Decimal | float | int): raise TemplateSyntaxError("%r argument to %r tag must be a number."
% (self.countervar, self.tag_name)) context[self.countervar] = count plural, plural_vars = self.render_token_list(self.plural) @@ -108,12 +103,11 @@ def render(self, context, nested=False): result = translation.npgettext(message_context, singular, plural, count) else: result = translation.ngettext(singular, plural, count) - vars.extend(plural_vars) + variables.extend(plural_vars) + elif message_context: + result = translation.pgettext(message_context, singular) else: - if message_context: - result = translation.pgettext(message_context, singular) - else: - result = translation.gettext(singular) + result = translation.gettext(singular) default_value = context.template.engine.string_if_invalid def render_value(key): @@ -131,15 +125,14 @@ def render_value(key): if nested: # Either string is malformed, or it's a bug raise TemplateSyntaxError( - "%r is unable to format string returned by gettext: %r " "using %r" % (self.tag_name, result, data) + "%r is unable to format string returned by gettext: %r using %r" % (self.tag_name, result, data) ) with translation.override(None): result = self.render(context, nested=True) if self.asvar: - context[self.asvar] = result + context[self.asvar] = str(result) return "" - else: - return result + return str(result) @register.tag("translate") @@ -163,7 +156,7 @@ def do_translate(parser, token): raise TemplateSyntaxError( "The '%s' option was specified more than once." % option, ) - elif option == "noop": + if option == "noop": noop = True elif option == "context": try: @@ -172,7 +165,7 @@ def do_translate(parser, token): raise TemplateSyntaxError("No argument provided to the '%s' tag for the context option." % bits[0]) if value in invalid_context: raise TemplateSyntaxError( - "Invalid argument '%s' provided to the '%s' tag for the context " "option" % (value, bits[0]), + "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]), ) message_context = parser.compile_filter(value) elif option == "as": @@ -246,15 +239,15 @@ def do_block_translate(parser, token): # noqa while remaining_bits: option = remaining_bits.pop(0) if option in options: - raise TemplateSyntaxError("The %r option was specified more " "than once." % option) + raise TemplateSyntaxError("The %r option was specified more than once." % option) if option == "with": value = token_kwargs(remaining_bits, parser, support_legacy=True) if not value: - raise TemplateSyntaxError('"with" in %r tag needs at least ' "one keyword argument." % bits[0]) + raise TemplateSyntaxError('"with" in %r tag needs at least one keyword argument.' % bits[0]) elif option == "count": value = token_kwargs(remaining_bits, parser, support_legacy=True) if len(value) != 1: - raise TemplateSyntaxError('"count" in %r tag expected exactly ' "one keyword argument." % bits[0]) + raise TemplateSyntaxError('"count" in %r tag expected exactly one keyword argument.' 
% bits[0]) elif option == "context": try: value = remaining_bits.pop(0) @@ -277,10 +270,7 @@ def do_block_translate(parser, token): # noqa countervar, counter = next(iter(options["count"].items())) else: countervar, counter = None, None - if "context" in options: - message_context = options["context"] - else: - message_context = None + message_context = options.get("context") extra_context = options.get("with", {}) trimmed = options.get("trimmed", False) @@ -324,7 +314,6 @@ def md5(value, lang): from aurora.i18n.models import Message return Message.get_md5(value, lang) - # return hashlib.md5((lang + "__" + str(value)).encode()).hexdigest() @register.filter() @@ -345,4 +334,4 @@ def bool_icon(value): img = static("admin/img/icon-yes.svg") else: img = static("admin/img/icon-no.svg") - return mark_safe(f'{str(bool(value))}') + return f'{str(bool(value))}' diff --git a/src/aurora/i18n/translate.py b/src/aurora/i18n/translate.py index 81be25b8..a4595a2e 100644 --- a/src/aurora/i18n/translate.py +++ b/src/aurora/i18n/translate.py @@ -1,18 +1,17 @@ import logging - import uuid - -import requests from abc import ABC, abstractmethod + from django.conf import settings +import requests + logger = logging.getLogger(__name__) class Translator(ABC): @abstractmethod - def translate(self, language, text): - ... + def translate(self, language, text): ... class AzureTranslator: @@ -32,7 +31,7 @@ def translate(self, language, text): try: body = [{"text": text}] params = {"api-version": "3.0", "from": "en", "to": [language]} - request = requests.post(self.endpoint, params=params, headers=self.headers, json=body) + request = requests.post(self.endpoint, params=params, headers=self.headers, json=body, timeout=60) response = request.json() return response[0]["translations"][0]["text"] except Exception as e: diff --git a/src/aurora/i18n/views.py b/src/aurora/i18n/views.py index 7cd00903..9436f08c 100644 --- a/src/aurora/i18n/views.py +++ b/src/aurora/i18n/views.py @@ -40,5 +40,4 @@ def get(self, request, *args, **kwargs): return super().get(request) def render_to_response(self, context, **response_kwargs): - response = super().render_to_response(context, **response_kwargs) - return response + return super().render_to_response(context, **response_kwargs) diff --git a/src/aurora/management/commands/demo.py b/src/aurora/management/commands/demo.py index e87e8c68..5d34e8dc 100644 --- a/src/aurora/management/commands/demo.py +++ b/src/aurora/management/commands/demo.py @@ -1,13 +1,12 @@ -""" -""" import datetime import logging -import random +import secrets import sys +from django import forms + import djclick as click import pytz -from django import forms from aurora.core import fields from aurora.core.models import OptionSet @@ -16,7 +15,7 @@ logger = logging.getLogger(__name__) -class NotRunningInTTYException(Exception): +class NotRunningInTTYError(Exception): pass @@ -27,21 +26,25 @@ def demo(**kwargs): vf1, __ = Validator.objects.update_or_create( name='name must start with "S"', - defaults=dict(target=Validator.FORM, code="value.family_name.startsWith('S');"), + defaults={"target": Validator.FORM, "code": "value.family_name.startsWith('S');"}, ) v1, __ = Validator.objects.get_or_create( name="max_length_25", - defaults=dict(message="String too long (max 25.chars)", target=Validator.FIELD, code="value.length<25;"), + defaults={ + "message": "String too long (max 25.chars)", + "target": Validator.FIELD, + "code": "value.length<25;", + }, ) v2, __ = Validator.objects.get_or_create( name="date_after_3000", - 
defaults=dict( - message="Date must be after 3000-12-01", - target=Validator.FIELD, - code="""var limit = Date.parse("3000-12-01"); + defaults={ + "message": "Date must be after 3000-12-01", + "target": Validator.FIELD, + "code": """var limit = Date.parse("3000-12-01"); var dt = Date.parse(value); dt > limit;""", - ), + }, ) OptionSet.objects.get_or_create( name="italian_locations", @@ -51,25 +54,30 @@ def demo(**kwargs): }, ) - hh, __ = FlexForm.objects.get_or_create(name="Demo Household", defaults=dict(validator=vf1)) + hh, __ = FlexForm.objects.get_or_create(name="Demo Household", defaults={"validator": vf1}) hh.fields.get_or_create(label="Family Name", field_type=forms.CharField, required=True) ind, __ = FlexForm.objects.get_or_create(name="Demo Individual") - ind.fields.get_or_create(label="First Name", defaults=dict(field_type=forms.CharField, required=True, validator=v1)) - ind.fields.get_or_create(label="Last Name", defaults=dict(field_type=forms.CharField, validator=v1)) - ind.fields.get_or_create(label="Date Of Birth", defaults=dict(field_type=forms.DateField, validator=v2)) + ind.fields.get_or_create( + label="First Name", + defaults={"field_type": forms.CharField, "required": True, "validator": v1}, + ) + ind.fields.get_or_create(label="Last Name", defaults={"field_type": forms.CharField, "validator": v1}) + ind.fields.get_or_create(label="Date Of Birth", defaults={"field_type": forms.DateField, "validator": v2}) ind.fields.get_or_create( - label="Options", defaults=dict(field_type=forms.ChoiceField, choices="opt 1, opt 2, opt 3") + label="Options", + defaults={"field_type": forms.ChoiceField, "choices": "opt 1, opt 2, opt 3"}, ) ind.fields.get_or_create( - label="Location", defaults={"field_type": fields.SelectField, "choices": "italian_locations"} + label="Location", + defaults={"field_type": fields.SelectField, "choices": "italian_locations"}, ) - hh.formsets.get_or_create(name="individuals", defaults=dict(flex_form=ind)) + hh.formsets.get_or_create(name="individuals", defaults={"flex_form": ind}) - reg, __ = Registration.objects.get_or_create(name="Demo Registration1", defaults=dict(flex_form=hh), active=True) + reg, __ = Registration.objects.get_or_create(name="Demo Registration1", defaults={"flex_form": hh}, active=True) today = datetime.datetime.today() last_month = datetime.datetime.combine(today - datetime.timedelta(days=31), datetime.datetime.min.time()) @@ -89,10 +97,10 @@ def demo(**kwargs): for day in range(1, 31): sys.stdout.write(f"{day},") sys.stdout.flush() - for _ in range(0, random.randint(*ranges[0])): - hour = random.randint(0, 23) - for _ in range(0, random.randint(*ranges[1])): - minute = random.randint(0, 59) + for _ in range(secrets.choice(range(*ranges[0]))): + hour = secrets.randbelow(24) + for _ in range(secrets.choice(range(*ranges[1]))): + minute = secrets.randbelow(60) ts = datetime.datetime(last_month.year, month, day, hour, minute, tzinfo=pytz.utc) with freeze_time(ts): Record.objects.create(registration=reg, remote_ip=fake.ipv4(), timestamp=ts) diff --git a/src/aurora/management/commands/env.py b/src/aurora/management/commands/env.py deleted file mode 100644 index bab840de..00000000 --- a/src/aurora/management/commands/env.py +++ /dev/null @@ -1,54 +0,0 @@ -import os - -from django.core.management import BaseCommand - - -class Command(BaseCommand): - def add_arguments(self, parser): - parser.add_argument( - "--no-mandatory", action="store_false", dest="mandatory", default=True, help="Do not dump mandatory" - ) - parser.add_argument( - 
"--no-optional", action="store_false", dest="optional", default=True, help="Do not dump optional" - ) - parser.add_argument("--no-values", action="store_false", dest="values", default=True, help="Do not dump values") - parser.add_argument( - "--comment-optional", action="store_true", dest="comment", default=False, help="Comment optional" - ) - parser.add_argument("--current", action="store_true", dest="current", default=False, help="Dump current values") - parser.add_argument("--vars", action="store_true", dest="vars", default=False, help="Dump current values") - parser.add_argument( - "--defaults", action="store_true", dest="defaults", default=False, help="Dump default values" - ) - parser.add_argument( - "--no-empty", action="store_true", dest="no_empty", default=False, help="Do not dump empty values" - ) - - def handle(self, *args, **options): - from aurora.config import MANDATORY, OPTIONS, SmartEnv, env - - if options["defaults"]: - EE = type("SmartEnv", (SmartEnv,), {"ENVIRON": {}}) - ee = EE(**MANDATORY, **OPTIONS) - - environment = {} - if options["mandatory"]: - environment.update(**MANDATORY) - if options["optional"]: - environment.update(**OPTIONS) - for k, v in sorted(environment.items()): - if options["defaults"]: - value = ee(k) - elif options["vars"]: - value = "${%s}" % k - elif options["current"]: - value = os.environ.get(k, "") - elif options["values"]: - value = env(k) - else: - value = "" - if value or not options["no_empty"]: - if options["comment"] and k in OPTIONS.keys(): - self.stdout.write(f"#{k}={value}") - else: - self.stdout.write(f"{k}={value}") diff --git a/src/aurora/management/commands/ukr.py b/src/aurora/management/commands/ukr.py index 48098209..e7e5f12a 100644 --- a/src/aurora/management/commands/ukr.py +++ b/src/aurora/management/commands/ukr.py @@ -1,10 +1,9 @@ -""" -""" import logging -import djclick as click from django.db.transaction import atomic +import djclick as click + from aurora.core import registry from aurora.core.models import CustomFieldType, FlexForm, OptionSet from aurora.core.registry import field_registry @@ -13,7 +12,7 @@ logger = logging.getLogger(__name__) -class NotRunningInTTYException(Exception): +class NotRunningInTTYError(Exception): pass @@ -2011,7 +2010,7 @@ def upgrade(**kwargs): for fld in optionsets: name = fld.pop("name") with atomic(): - fld = OptionSet.objects.update_or_create(name=name, defaults=fld) + OptionSet.objects.update_or_create(name=name, defaults=fld) custom_fields = [ { @@ -2034,7 +2033,11 @@ def upgrade(**kwargs): ) }, }, - {"name": "Gender", "base_type": registry.forms.ChoiceField, "attrs": {"choices": ["Female", "Male"]}}, + { + "name": "Gender", + "base_type": registry.forms.ChoiceField, + "attrs": {"choices": ["Female", "Male"]}, + }, { "name": "ID Type", "base_type": registry.forms.ChoiceField, @@ -2110,18 +2113,17 @@ def upgrade(**kwargs): for fld in custom_fields: name = fld.pop("name") with atomic(): - fld = CustomFieldType.build(name, fld) - field_registry.register(fld.get_class()) + custom_fld = CustomFieldType.build(name, fld) + field_registry.register(custom_fld.get_class()) base, __ = FlexForm.objects.get_or_create(name="Basic") hh, __ = FlexForm.objects.get_or_create(name="Household") ind, __ = FlexForm.objects.get_or_create(name="Individual") - doc, __ = FlexForm.objects.get_or_create(name="Document") - bank, __ = FlexForm.objects.get_or_create(name="Bank Account") + FlexForm.objects.get_or_create(name="Document") + FlexForm.objects.get_or_create(name="Bank Account") base.add_formset(hh, 
extra=1, dynamic=False, max_num=1, min_num=1) base.add_formset(ind, extra=1, dynamic=True, min_num=1) - # ind.add_formset(doc, extra=0, dynamic=True) base.add_field( "With whom may we share your information (select one or multiple among the following)?", @@ -2134,10 +2136,20 @@ def upgrade(**kwargs): name="enum_org", ) base.add_field("Residence status", "aurora.core.models.ResidenceStatus") - Registration.objects.get_or_create(name="Ucraina", defaults=dict(intro=INTRO, flex_form=base)) + Registration.objects.get_or_create(name="Ucraina", defaults={"intro": INTRO, "flex_form": base}) hh.add_field("Admin 1", registry.fields.AjaxSelectField, datasource="ua_admin1") - hh.add_field("Admin 2", registry.fields.AjaxSelectField, datasource="ua_admin2", parent="ua_admin1") - hh.add_field("Admin 3", registry.fields.AjaxSelectField, datasource="ua_admin3", parent="ua_admin2") + hh.add_field( + "Admin 2", + registry.fields.AjaxSelectField, + datasource="ua_admin2", + parent="ua_admin1", + ) + hh.add_field( + "Admin 3", + registry.fields.AjaxSelectField, + datasource="ua_admin3", + parent="ua_admin2", + ) ind.add_field("First Name") ind.add_field("Last Name") diff --git a/src/aurora/management/commands/upgrade.py b/src/aurora/management/commands/upgrade.py index 702c05d5..5c3350a2 100644 --- a/src/aurora/management/commands/upgrade.py +++ b/src/aurora/management/commands/upgrade.py @@ -1,20 +1,18 @@ -""" -""" import logging from pathlib import Path -import djclick as click from django.core.cache import cache from django.core.management import CommandError, call_command + +import djclick as click from redis.exceptions import LockError from aurora import VERSION -from aurora.core.models import FlexForm logger = logging.getLogger(__name__) -class NotRunningInTTYException(Exception): +class NotRunningInTTYError(Exception): pass @@ -36,24 +34,54 @@ class NotRunningInTTYException(Exception): ) @click.option("--migrate/--no-migrate", default=True, is_flag=True, help="Run database migrations") @click.option("--static/--no-static", default=True, is_flag=True, help="Collect static assets") -@click.option("--organization", default=None, envvar="DEFAULT_ORGANIZATION", help="Main Organization name") -def upgrade(admin_email, admin_password, static, migrate, prompt, verbosity, organization, **kwargs): +@click.option( + "--organization", + default=None, + envvar="DEFAULT_ORGANIZATION", + help="Main Organization name", +) +def upgrade( + admin_email, + admin_password, + static, + migrate, + prompt, + verbosity, + organization, + **kwargs, +): from aurora.config import env + from aurora.core.models import FlexForm, Organization, Project + from aurora.registration.models import Registration extra = {"no_input": prompt, "verbosity": verbosity - 1, "stdout": None} click.echo("Run upgrade.. 
waiting for lock") try: - with cache.lock(env("MIGRATION_LOCK_KEY"), timeout=60 * 10, blocking_timeout=2, version=VERSION): + with cache.lock( + env("MIGRATION_LOCK_KEY"), + timeout=60 * 10, + blocking_timeout=2, + version=VERSION, + ): if migrate: if verbosity >= 1: click.echo("Run migrations") call_command("migrate", **extra) call_command("create_extra_permissions") + # ensure project/org + click.echo("Set default Org/Project") + unicef, __ = Organization.objects.get_or_create(slug="unicef", defaults={"name": organization}) + default, __ = Project.objects.get_or_create(slug="default-project", organization=unicef) + + Project.objects.filter(organization__isnull=True).update(organization=unicef) + Registration.objects.filter(project__isnull=True).update(project=default) + FlexForm.objects.filter(project__isnull=True).update(project=default) + static_root = Path(env("STATIC_ROOT")) if not static_root.exists(): static_root.mkdir(parents=True) - print(f"STATIC_ROOT set to '{static_root}' ('{static_root.absolute()}')") + click.echo(f"STATIC_ROOT set to '{static_root}' ('{static_root.absolute()}')") if static: if verbosity >= 1: click.echo("Run collectstatic") @@ -72,13 +100,13 @@ def upgrade(admin_email, admin_password, static, migrate, prompt, verbosity, org if admin_email: from django.contrib.auth import get_user_model - User = get_user_model() + User = get_user_model() # noqa if User.objects.filter(is_superuser=True).exists(): - print("Superuser already exists. Ignoring ADMIN_EMAIL") + click.echo("Superuser already exists. Ignoring ADMIN_EMAIL") else: username, __ = admin_email.split("@") if User.objects.filter(username=username).exists(): - print("User with this name already exists") + click.echo("User with this name already exists") else: try: call_command( @@ -99,9 +127,9 @@ def upgrade(admin_email, admin_password, static, migrate, prompt, verbosity, org from django.utils import translation django.setup() - print(f"LANGUAGE_CODE: {settings.LANGUAGE_CODE}") - print(f"LOCALE: {translation.to_locale(settings.LANGUAGE_CODE)}") + click.echo(f"LANGUAGE_CODE: {settings.LANGUAGE_CODE}") + click.echo(f"LOCALE: {translation.to_locale(settings.LANGUAGE_CODE)}") translation.activate(settings.LANGUAGE_CODE) - print("check_for_language", translation.check_for_language("settings.LANGUAGE_CODE")) + click.echo(f"check_for_language {translation.check_for_language('settings.LANGUAGE_CODE')}") except LockError as e: - print(f"LockError: {e}") + click.echo(f"LockError: {e}") diff --git a/src/aurora/registration/admin/base.py b/src/aurora/registration/admin/base.py index a9ee9ce2..e69de29b 100644 --- a/src/aurora/registration/admin/base.py +++ b/src/aurora/registration/admin/base.py @@ -1,9 +0,0 @@ -# DATA = { -# "registration.Registration": [], -# "core.FlexForm": [], -# "core.FormSet": [], -# "core.Validator": [], -# "core.OptionSet": [], -# "core.FlexFormField": [], -# "i18n.Message": [], -# } diff --git a/src/aurora/registration/admin/filters.py b/src/aurora/registration/admin/filters.py index d7c1f7ca..46209b85 100644 --- a/src/aurora/registration/admin/filters.py +++ b/src/aurora/registration/admin/filters.py @@ -2,12 +2,13 @@ import re from datetime import datetime, timedelta -from adminfilters.numbers import NumberFilter from django.contrib.admin import SimpleListFilter from django.contrib.admin.options import IncorrectLookupParameters from django.urls import reverse from django.utils.translation import gettext as _ +from adminfilters.numbers import NumberFilter + from ...administration.filters import 
BaseAutoCompleteFilter logger = logging.getLogger(__name__) @@ -18,10 +19,10 @@ class OrganizationFilter(BaseAutoCompleteFilter): class RegistrationProjectFilter(BaseAutoCompleteFilter): - fk_name = "flex_form__project__organization__exact" + fk_name = "project__organization__exact" def has_output(self): - return "flex_form__project__organization__exact" in self.request.GET + return "project__organization__exact" in self.request.GET def get_url(self): url = reverse("%s:autocomplete" % self.admin_site.name) @@ -79,15 +80,14 @@ def queryset(self, request, queryset): self.filters = {match: value} elif m_range and m_range.groups(): start, end = self.re_range.match(raw_value).groups() - self.filters = {f"{self.field.name}__date__gte": start, f"{self.field.name}__date__lte": end} + self.filters = { + f"{self.field.name}__date__gte": start, + f"{self.field.name}__date__lte": end, + } elif m_list and m_list.groups(): value = raw_value.split(",") match = "%s__date__in" % self.field.name self.filters = {match: value} - # elif m_unlike and m_unlike.groups(): - # match = '%s__exact' % self.field.name - # op, value = self.re_unlike.match(raw).groups() - # queryset = queryset.exclude(**{match: value}) else: # pragma: no cover raise IncorrectLookupParameters() try: diff --git a/src/aurora/registration/admin/paginator.py b/src/aurora/registration/admin/paginator.py index 8e99c76e..c0fbcddc 100644 --- a/src/aurora/registration/admin/paginator.py +++ b/src/aurora/registration/admin/paginator.py @@ -1,10 +1,15 @@ +import logging from django.core.paginator import Paginator from django.db import OperationalError, connection, transaction from django.utils.functional import cached_property +logger = logging.getLogger(__name__) + class LargeTablePaginator(Paginator): """ + Override the count method of QuerySet objects to avoid timeouts. + Combination of ideas from: - https://gist.github.com/safar/3bbf96678f3e479b6cb683083d35cb4d - https://medium.com/@hakibenita/optimizing-django-admin-paginator-53c4eb6bfca3 @@ -19,9 +24,7 @@ class LargeTablePaginator(Paginator): @cached_property def count(self): - """ - Returns an estimated number of objects, across all pages. 
- """ + """Return an estimated number of objects, across all pages.""" try: with transaction.atomic(), connection.cursor() as cursor: # Limit to 150 ms @@ -38,9 +41,8 @@ def count(self): "SELECT reltuples FROM pg_class WHERE relname = %s", [self.object_list.query.model._meta.db_table], ) - estimate = int(cursor.fetchone()[0]) - return estimate - except Exception: + return int(cursor.fetchone()[0]) + except Exception as e: # If any other exception occurred fall back to default behaviour - pass + logger.exception(e) return super().count diff --git a/src/aurora/registration/admin/protocol.py b/src/aurora/registration/admin/protocol.py index 8d92c782..963578b3 100644 --- a/src/aurora/registration/admin/protocol.py +++ b/src/aurora/registration/admin/protocol.py @@ -1,13 +1,46 @@ -from typing import Iterable +import logging +from typing import Any, Iterable, Sequence + +from django.core.serializers.json import Deserializer as JsonDeserializer +from django.db import connections, transaction +from django.db.models import Q from admin_sync.collector import ForeignKeysCollector -from admin_sync.exceptions import SyncError +from admin_sync.exceptions import ProtocolError, SyncError from admin_sync.protocol import LoadDumpProtocol -from django.db.models import Q + +logger = logging.getLogger(__name__) class AuroraSyncRegistrationProtocol(LoadDumpProtocol): - def collect(self, data: Iterable, collect_related=True): + def serialize(self, data: Iterable): + return super().serialize(data) + + def deserialize(self, payload: str) -> list[list[Any]]: + processed = [] + try: + connection = connections[self.using] + with connection.constraint_checks_disabled(), transaction.atomic(self.using): + objects = JsonDeserializer( + payload, + ignorenonexistent=True, + handle_forward_references=True, + ) + for obj in objects: + obj.save(using=self.using) + processed.append( + [ + obj.object._meta.object_name, + str(obj.object.pk), + str(obj.object), + ] + ) + except Exception as e: + logger.exception(e) + raise ProtocolError(e) + return processed + + def collect(self, data: Sequence, collect_related=True): from aurora.core.models import FlexFormField, FormSet from aurora.registration.models import Registration @@ -18,7 +51,6 @@ def collect(self, data: Iterable, collect_related=True): raise ValueError("AuroraSyncRegistrationProtocol can be used only for Registration") return_value = [] for reg in list(data): - # reg: Registration = data[0] c = ForeignKeysCollector(False) c.collect([reg.flex_form, reg.validator, reg]) c.add(reg.scripts.all()) diff --git a/src/aurora/registration/admin/record.py b/src/aurora/registration/admin/record.py index 52c40779..8908c9db 100644 --- a/src/aurora/registration/admin/record.py +++ b/src/aurora/registration/admin/record.py @@ -1,13 +1,14 @@ import json import logging +from django.conf import settings +from django.shortcuts import render +from django.urls import reverse + from admin_extra_buttons.decorators import button, link from adminfilters.autocomplete import AutoCompleteFilter from adminfilters.numbers import NumberFilter from adminfilters.value import ValueFilter -from django.conf import settings -from django.shortcuts import render -from django.urls import reverse from smart_admin.modeladmin import SmartModelAdmin from ...core.utils import is_root @@ -21,7 +22,14 @@ class RecordAdmin(SmartModelAdmin): search_fields = ("registration__name",) list_display = ("timestamp", "remote_ip", "id", "registration", "ignored") - readonly_fields = ("registration", "timestamp", "remote_ip", 
"id", "fields", "counters") + readonly_fields = ( + "registration", + "timestamp", + "remote_ip", + "id", + "fields", + "counters", + ) list_filter = ( ("registration", AutoCompleteFilter), ("registrar", AutoCompleteFilter), @@ -42,16 +50,10 @@ class RecordAdmin(SmartModelAdmin): def get_actions(self, request): return {} - # {name: (func, name, desc) for func, name, desc in actions} - # actions = super().get_actions(request) - # for name, __ in actions.items(): - # print("src/aurora/registration/admin.py: 485", name) - # return {"export_as_csv": self.get_action(self.export_as_csv)} def get_queryset(self, request): qs = super().get_queryset(request) - qs = qs.select_related("registration", "registrar") - return qs + return qs.select_related("registration", "registrar") def get_common_context(self, request, pk=None, **kwargs): return super().get_common_context(request, pk, is_root=is_root(request), **kwargs) @@ -64,7 +66,10 @@ def changeform_view(self, request, object_id=None, form_url="", extra_context=No def receipt(self, button): try: if button.original: - base = reverse("register-done", args=[button.original.registration.pk, button.original.pk]) + base = reverse( + "register-done", + args=[button.original.registration.pk, button.original.pk], + ) button.href = base button.html_attrs["target"] = f"_{button.original.pk}" except Exception as e: diff --git a/src/aurora/registration/admin/registration.py b/src/aurora/registration/admin/registration.py index f1639918..e0c4208c 100644 --- a/src/aurora/registration/admin/registration.py +++ b/src/aurora/registration/admin/registration.py @@ -2,32 +2,34 @@ import io import json import logging -from django.utils.module_loading import import_string from hashlib import md5 -from admin_extra_buttons.decorators import button, choice, view -from admin_sync.mixin import SyncMixin -from dateutil.utils import today from django import forms from django.conf import settings from django.contrib import messages from django.core.cache import cache from django.db.models import JSONField +from django.db.models.functions import Collate from django.db.models.signals import post_delete, post_save from django.db.transaction import atomic from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render -from django.template import Template from django.template.loader import select_template from django.urls import reverse, translate_url +from django.utils.module_loading import import_string from django.utils.text import slugify + +from admin_extra_buttons.decorators import button, choice, view +from admin_sync.mixin import SyncMixin +from adminfilters.mixin import AdminAutoCompleteSearchMixin +from dateutil.utils import today from django_redis import get_redis_connection from jsoneditor.forms import JSONEditor from smart_admin.modeladmin import SmartModelAdmin from aurora.core.admin.base import ConcurrencyVersionAdmin from aurora.core.forms import CSVOptionsForm, DateFormatsForm, VersionMedia -from aurora.core.models import FormSet, Validator, FlexForm, FlexFormField +from aurora.core.models import FlexForm, FlexFormField, FormSet, Validator from aurora.core.utils import ( build_dict, build_form_fake_data, @@ -36,8 +38,8 @@ is_root, namify, ) +from typing import TYPE_CHECKING from aurora.i18n.forms import TemplateForm, TranslationForm -from aurora.i18n.translate import Translator from aurora.registration.admin.filters import ( OrganizationFilter, RegistrationProjectFilter, @@ -55,22 +57,37 @@ logger = logging.getLogger(__name__) +if 
TYPE_CHECKING: + from aurora.i18n.translate import Translator + from django.template import Template + + def can_export_data(request, obj, handler=None): return (obj.export_allowed and request.user.has_perm("registration.export_data", obj)) or is_root(request) -class RegistrationAdmin(ConcurrencyVersionAdmin, SyncMixin, SmartModelAdmin): - search_fields = ("name", "title", "slug") +class RegistrationAdmin(ConcurrencyVersionAdmin, AdminAutoCompleteSearchMixin, SyncMixin, SmartModelAdmin): + search_fields = ("name_deterministic", "title", "slug") date_hierarchy = "start" list_filter = ( "active", - ("flex_form__project__organization", OrganizationFilter), - ("flex_form__project", RegistrationProjectFilter), + ("project__organization", OrganizationFilter), + ("project", RegistrationProjectFilter), + "archived", + "protected", + "show_in_homepage", + ) + list_display = ( + "name", + "slug", + "organization", + "project", + "secure", + "active", "archived", "protected", "show_in_homepage", ) - list_display = ("name", "slug", "project", "secure", "active", "archived", "protected", "show_in_homepage") exclude = ("public_key",) autocomplete_fields = ("flex_form",) save_as = True @@ -95,7 +112,13 @@ class RegistrationAdmin(ConcurrencyVersionAdmin, SyncMixin, SmartModelAdmin): }, ), ("Config", {"fields": ("flex_form", "validator", "scripts")}), - ("Validity", {"classes": ("collapse",), "fields": (("start", "end"), ("archived", "active"))}), + ( + "Validity", + { + "classes": ("collapse",), + "fields": (("start", "end"), ("archived", "active")), + }, + ), ("Languages", {"classes": ("collapse",), "fields": ("locale", "locales")}), ("Security", {"classes": ("collapse",), "fields": ("protected",)}), ("Text", {"classes": ("collapse",), "fields": ("intro", "footer")}), @@ -105,7 +128,22 @@ class RegistrationAdmin(ConcurrencyVersionAdmin, SyncMixin, SmartModelAdmin): protocol_class = AuroraSyncRegistrationProtocol def get_queryset(self, request): - return super().get_queryset(request).select_related("project") + return ( + super() + .get_queryset(request) + .annotate( + name_deterministic=Collate("name", "und-x-icu"), + ) + .select_related("project", "project__organization") + ) + + def get_list_display(self, request): + base = list(self.list_display) + if "project__organization__exact" in request.GET: + base.remove("organization") + else: + base.remove("project") + return base def formfield_for_dbfield(self, db_field, request, **kwargs): formfield = super().formfield_for_dbfield(db_field, request, **kwargs) @@ -178,7 +216,7 @@ def export_as_csv(self, request, pk): skipped = [] all_fields = [] for r in records: - for field_name in r.keys(): + for field_name in r: if field_name not in skipped and field_name in exclude_fields: skipped.append(field_name) elif field_name not in all_fields and field_name in include_fields: @@ -189,7 +227,6 @@ def export_as_csv(self, request, pk): out = io.StringIO() writer = csv.DictWriter( out, - # dialect="excel", fieldnames=all_fields, restval="-", extrasaction="ignore", @@ -200,16 +237,14 @@ def export_as_csv(self, request, pk): writer.writerows(records) out.seek(0) filename = f"Registration_{reg.slug}.csv" - response = HttpResponse( + return HttpResponse( out.read(), headers={"Content-Disposition": 'attachment;filename="%s"' % filename}, content_type="text/csv", ) - return response - else: - ctx["all_fields"] = sorted(set(all_fields)) - ctx["skipped"] = skipped - ctx["qs"] = records[:10] + ctx["all_fields"] = sorted(set(all_fields)) + ctx["skipped"] = skipped + ctx["qs"] = 
records[:10] except Exception as e: logger.exception(e) self.message_error_to_user(request, e) @@ -262,13 +297,15 @@ def removekey(self, request, pk): self.message_user(request, "Encryption key removed", messages.WARNING) self.log_change(request, self.object, "Encryption Key has been removed") return HttpResponseRedirect("..") - else: - return render(request, "admin/registration/registration/keys_remove.html", ctx) + return render(request, "admin/registration/registration/keys_remove.html", ctx) @view() def generate_keys(self, request, pk): ctx = self.get_common_context( - request, pk, media=self.media, title="Generate Private/Public Key pair to encrypt this Registration data" + request, + pk, + media=self.media, + title="Generate Private/Public Key pair to encrypt this Registration data", ) if request.method == "POST": @@ -355,10 +392,17 @@ def clone(self, request, pk): with atomic(): source = Registration.objects.get(id=reg.pk) title = form.cleaned_data["title"] - reg, __ = clone_model(source, name=namify(title), title=title, version=1, slug=slugify(title)) + reg, __ = clone_model( + source, + name=namify(title), + title=title, + version=1, + slug=slugify(title), + ) if form.cleaned_data["deep"]: main_form, __ = clone_model( - source.flex_form, name=f"{source.flex_form.name}-(clone: {reg.name})" + source.flex_form, + name=f"{source.flex_form.name}-(clone: {reg.name})", ) reg.flex_form = main_form reg.save() @@ -378,7 +422,11 @@ def clone(self, request, pk): clone_model(field, name=field.name, flex_form=frm2) for fs in formsets: - clone_model(fs, parent=forms[fs.parent.pk], flex_form=forms[fs.flex_form.pk]) + clone_model( + fs, + parent=forms[fs.parent.pk], + flex_form=forms[fs.flex_form.pk], + ) return HttpResponseRedirect(reverse("admin:registration_registration_inspect", args=[reg.pk])) except Exception as e: logger.exception(e) @@ -450,23 +498,29 @@ def prepare_translation(self, request, pk): locale = form.cleaned_data["locale"] translate = form.cleaned_data["translate"] if locale not in instance.locales: - self.message_user(request, "Language not enabled for this registration", messages.ERROR) + self.message_user( + request, + "Language not enabled for this registration", + messages.ERROR, + ) return HttpResponseRedirect(".") self.create_translation(self, request, pk) stored = con.lrange(key, 0, -1) - collected = sorted(set([c.decode() for c in stored])) + collected = sorted({c.decode() for c in stored}) from aurora.i18n.models import Message entries = list(Message.objects.filter(locale=locale).values_list("msgid", "msgstr")) data = dict(entries) if translate == "2": t: Translator = import_string(settings.TRANSLATOR_SERVICE)() - func = lambda x: t.translate(locale, x) + func = lambda x: t.translate(locale, x) # noqa elif translate == "1": t: Translator = import_string(settings.TRANSLATOR_SERVICE)() - func = lambda x: x if data.get(x, "") == x else t.translate(locale, x) + func = ( # noqa + lambda x: x if data.get(x, "") == x else t.translate(locale, x) + ) else: - func = lambda x: data.get(x, "") + func = lambda x: data.get(x, "") # noqa ctx["collected"] = {c: func(c) for c in collected} ctx["language_code"] = locale elif "export" in request.POST: @@ -481,8 +535,6 @@ def prepare_translation(self, request, pk): for i, msg in enumerate(msgids, 1): writer.writerow([str(i), msg, ""]) return response - # language_code = request.POST.get("language_code") - # for i, row in enumerate(data["messages"], 1): else: form = TranslationForm() @@ -513,7 +565,11 @@ def create_translation(self, 
request, pk): key = f"i18n_{request.user.pk}_{md5(request.session.session_key.encode()).hexdigest()}" settings.ALLOWED_HOSTS.append("testserver") - headers = {"HTTP_ACCEPT_LANGUAGE": "locale", "HTTP_I18N_SESSION": key, "HTTP_I18N": "true"} + headers = { + "HTTP_ACCEPT_LANGUAGE": "locale", + "HTTP_I18N_SESSION": key, + "HTTP_I18N": "true", + } try: client = Client(**headers) r1 = client.get(uri) @@ -530,7 +586,10 @@ def create_translation(self, request, pk): updated = Message.objects.filter(locale=locale).count() added = Message.objects.filter(locale=locale, draft=True, timestamp__date=today()) - self.message_user(request, f"{updated - existing} messages created. {updated} available") + self.message_user( + request, + f"{updated - existing} messages created. {updated} available", + ) ctx["uri"] = uri ctx["locale"] = locale ctx["added"] = added @@ -543,7 +602,12 @@ def create_translation(self, request, pk): @choice(order=900, change_list=False) def data(self, button): - button.choices = [self.charts, self.inspect_data, self.view_collected_data, self.collect] + button.choices = [ + self.charts, + self.inspect_data, + self.view_collected_data, + self.collect, + ] if can_export_data(button.context["request"], button.original): button.choices.append(self.export_as_csv) return button @@ -561,7 +625,13 @@ def inspect_data(self, request, pk): @view(change_form=True, html_attrs={"target": "_new"}) def charts(self, request, pk): - return HttpResponseRedirect(reverse("charts:registration", args=[pk])) + obj = self.get_object(request, pk) + return HttpResponseRedirect( + reverse( + "charts:registration", + args=[obj.project.organization.slug, obj.project.pk, pk], + ) + ) @view(permission=is_root, html_attrs={"class": "aeb-warn"}) def view_collected_data(self, button, pk): @@ -587,7 +657,11 @@ def james_editor(self, request, pk): form = JamesForm(request.POST, instance=ctx["original"]) if form.is_valid(): form.save() - cache.set(f"james_{pk}", form.cleaned_data["data"], version=get_system_cache_version()) + cache.set( + f"james_{pk}", + form.cleaned_data["data"], + version=get_system_cache_version(), + ) return HttpResponseRedirect(".") else: data = cache.get(f"james_{pk}", version=get_system_cache_version()) diff --git a/src/aurora/registration/debug.py b/src/aurora/registration/debug.py deleted file mode 100644 index aac43aba..00000000 --- a/src/aurora/registration/debug.py +++ /dev/null @@ -1,47 +0,0 @@ -import base64 - -import requests -from admin_extra_buttons.decorators import button -from django import forms -from django.template.response import TemplateResponse -from requests.auth import HTTPBasicAuth - -from aurora.core.utils import is_root -from aurora.registration.models import Record - - -class DebugMixin: - @button(permission=is_root) - def fetch(self, request): - class FetchForm(forms.Form): - host = forms.URLField() - username = forms.CharField() - password = forms.CharField(widget=forms.PasswordInput) - registration = forms.IntegerField() - start = forms.IntegerField() - end = forms.IntegerField() - - def clean(self): - return super().clean() - - ctx = self.get_common_context(request) - if request.method == "POST": - form = FetchForm(request.POST) - if form.is_valid(): - auth = HTTPBasicAuth(form.cleaned_data["username"], form.cleaned_data["password"]) - url = "{host}api/data/{registration}/{start}/{end}/".format(**form.cleaned_data) - with requests.get(url, stream=True, auth=auth) as res: - if res.status_code != 200: - raise Exception(str(res)) - payload = res.json() - for record in 
payload["data"]: - Record.objects.update_or_create( - registration_id=form.cleaned_data["registration"], - defaults={"timestamp": record["timestamp"], "storage": base64.b64decode(record["storage"])}, - ) - else: - form = FetchForm() - - ctx["form"] = form - response = TemplateResponse(request, "admin/registration/record/fetch.html", ctx) - return response diff --git a/src/aurora/registration/forms.py b/src/aurora/registration/forms.py index 9cb70a2d..d84b3858 100644 --- a/src/aurora/registration/forms.py +++ b/src/aurora/registration/forms.py @@ -1,11 +1,11 @@ import logging import re -import jmespath -from adminfilters.querystring import QueryStringFilter from django import forms from django.core.exceptions import ValidationError -from django.utils.safestring import mark_safe + +import jmespath +from adminfilters.querystring import QueryStringFilter from django_regex.utils import RegexList from mdeditor.fields import MDTextFormField @@ -32,19 +32,50 @@ def validate(self, value): def as_link(param): - return mark_safe(f'{param}') + return f'{param}' class RegistrationForm(forms.ModelForm): unique_field_path = JMESPathFormField( - required=False, help_text=mark_safe("JAMESPath expression. " f"Read more at {as_link('https://jmespath.org/')}") + required=False, + help_text=f"JAMESPath expression. Read more at {as_link('https://jmespath.org/')}", ) intro = MDTextFormField(required=False) footer = MDTextFormField(required=False) class Meta: model = Registration - exclude = () + fields = ( + "version", + "name", + "title", + "slug", + "project", + "flex_form", + "start", + "end", + "active", + "archived", + "locale", + "dry_run", + "handler", + "show_in_homepage", + "welcome_page", + "locales", + "intro", + "footer", + "client_validation", + "validator", + "scripts", + "unique_field_path", + "unique_field_error", + "public_key", + "encrypt_data", + "advanced", + "protected", + "is_pwa_enabled", + "export_allowed", + ) class CloneForm(forms.Form): @@ -80,8 +111,8 @@ class RegistrationExportForm(forms.Form): ) def clean_filters(self): - filter = QueryStringFilter(None, {}, Record, None) - return filter.get_filters(self.cleaned_data["filters"]) + qs_filter = QueryStringFilter(None, {}, Record, None) + return qs_filter.get_filters(self.cleaned_data["filters"]) def clean_include(self): try: @@ -98,9 +129,9 @@ def clean_exclude(self): class JamesForm(forms.ModelForm): - # unique_field = forms.CharField(widget=forms.HiddenInput) unique_field_path = forms.CharField( - label="JMESPath expression", widget=forms.TextInput(attrs={"style": "width:90%"}) + label="JMESPath expression", + widget=forms.TextInput(attrs={"style": "width:90%"}), ) data = forms.CharField(widget=forms.Textarea, required=False) diff --git a/src/aurora/registration/migrations/0001_initial.py b/src/aurora/registration/migrations/0001_initial.py index 07017bfe..edf84e56 100644 --- a/src/aurora/registration/migrations/0001_initial.py +++ b/src/aurora/registration/migrations/0001_initial.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -17,20 +16,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Registration", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + 
django.contrib.postgres.fields.citext.CICharField(max_length=255, unique=True), + ), ("start", models.DateField(auto_now_add=True)), ("end", models.DateField(blank=True, null=True)), ("active", models.BooleanField(default=False)), ( "locale", models.CharField( - choices=[("en-us", "English"), ("pl-pl", "Polskie"), ("uk-UA", "український")], + choices=[ + ("en-us", "English"), + ("pl-pl", "Polskie"), + ("uk-UA", "український"), + ], default="en", max_length=10, ), ), - ("flex_form", models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="core.flexform")), + ( + "flex_form", + models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="core.flexform"), + ), ], options={ "get_latest_by": "start", @@ -39,12 +56,23 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Record", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("timestamp", models.DateField(auto_now_add=True)), ("data", models.JSONField(default=dict)), ( "registration", - models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="registration.registration"), + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="registration.registration", + ), ), ], ), diff --git a/src/aurora/registration/migrations/0002_registration_public_key.py b/src/aurora/registration/migrations/0002_registration_public_key.py index 8ee263d4..d4f1a5bc 100644 --- a/src/aurora/registration/migrations/0002_registration_public_key.py +++ b/src/aurora/registration/migrations/0002_registration_public_key.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0001_initial"), ] diff --git a/src/aurora/registration/migrations/0003_record_data2.py b/src/aurora/registration/migrations/0003_record_data2.py index 17d656cb..a39818ac 100644 --- a/src/aurora/registration/migrations/0003_record_data2.py +++ b/src/aurora/registration/migrations/0003_record_data2.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0002_registration_public_key"), ] diff --git a/src/aurora/registration/migrations/0004_registration_public_key2.py b/src/aurora/registration/migrations/0004_registration_public_key2.py index 48ddb9cb..e110fd4a 100644 --- a/src/aurora/registration/migrations/0004_registration_public_key2.py +++ b/src/aurora/registration/migrations/0004_registration_public_key2.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0003_record_data2"), ] diff --git a/src/aurora/registration/migrations/0005_auto_20220312_2253.py b/src/aurora/registration/migrations/0005_auto_20220312_2253.py index ab810e74..d19e31c9 100644 --- a/src/aurora/registration/migrations/0005_auto_20220312_2253.py +++ b/src/aurora/registration/migrations/0005_auto_20220312_2253.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0004_registration_public_key2"), ] diff --git a/src/aurora/registration/migrations/0006_remove_record_data.py b/src/aurora/registration/migrations/0006_remove_record_data.py index 1c4e5a68..0de6cae1 100644 --- a/src/aurora/registration/migrations/0006_remove_record_data.py +++ b/src/aurora/registration/migrations/0006_remove_record_data.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0005_auto_20220312_2253"), ] diff --git 
a/src/aurora/registration/migrations/0007_registration_intro.py b/src/aurora/registration/migrations/0007_registration_intro.py index 0d30a4ae..454cd998 100644 --- a/src/aurora/registration/migrations/0007_registration_intro.py +++ b/src/aurora/registration/migrations/0007_registration_intro.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0006_remove_record_data"), ] diff --git a/src/aurora/registration/migrations/0008_registration_slug.py b/src/aurora/registration/migrations/0008_registration_slug.py index eb61e286..137d9604 100644 --- a/src/aurora/registration/migrations/0008_registration_slug.py +++ b/src/aurora/registration/migrations/0008_registration_slug.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0007_registration_intro"), ] diff --git a/src/aurora/registration/migrations/0009_registration_title.py b/src/aurora/registration/migrations/0009_registration_title.py index e4734bdd..5c034171 100644 --- a/src/aurora/registration/migrations/0009_registration_title.py +++ b/src/aurora/registration/migrations/0009_registration_title.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0008_registration_slug"), ] diff --git a/src/aurora/registration/migrations/0010_registration_advanced.py b/src/aurora/registration/migrations/0010_registration_advanced.py index f75b3007..f0db6122 100644 --- a/src/aurora/registration/migrations/0010_registration_advanced.py +++ b/src/aurora/registration/migrations/0010_registration_advanced.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0009_registration_title"), ] diff --git a/src/aurora/registration/migrations/0011_auto_20220321_0507.py b/src/aurora/registration/migrations/0011_auto_20220321_0507.py index 34a0d4ee..ae92d752 100644 --- a/src/aurora/registration/migrations/0011_auto_20220321_0507.py +++ b/src/aurora/registration/migrations/0011_auto_20220321_0507.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0010_registration_advanced"), ] diff --git a/src/aurora/registration/migrations/0012_alter_registration_locale.py b/src/aurora/registration/migrations/0012_alter_registration_locale.py index 1c16fdf6..1caaa076 100644 --- a/src/aurora/registration/migrations/0012_alter_registration_locale.py +++ b/src/aurora/registration/migrations/0012_alter_registration_locale.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0011_auto_20220321_0507"), ] @@ -14,7 +13,11 @@ class Migration(migrations.Migration): model_name="registration", name="locale", field=models.CharField( - choices=[("en-us", "English"), ("pl-pl", "Polskie"), ("uk-ua", "український")], + choices=[ + ("en-us", "English"), + ("pl-pl", "Polskie"), + ("uk-ua", "український"), + ], default="en", max_length=10, ), diff --git a/src/aurora/registration/migrations/0013_registration_validator.py b/src/aurora/registration/migrations/0013_registration_validator.py index 326a9127..20caa1f8 100644 --- a/src/aurora/registration/migrations/0013_registration_validator.py +++ b/src/aurora/registration/migrations/0013_registration_validator.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0029_alter_validator_target"), ("registration", "0012_alter_registration_locale"), diff --git a/src/aurora/registration/migrations/0014_registration_footer.py 
b/src/aurora/registration/migrations/0014_registration_footer.py index d5c28a0a..3173f961 100644 --- a/src/aurora/registration/migrations/0014_registration_footer.py +++ b/src/aurora/registration/migrations/0014_registration_footer.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0013_registration_validator"), ] diff --git a/src/aurora/registration/migrations/0015_alter_registration_locale.py b/src/aurora/registration/migrations/0015_alter_registration_locale.py index 43fdd4ae..72a79f88 100644 --- a/src/aurora/registration/migrations/0015_alter_registration_locale.py +++ b/src/aurora/registration/migrations/0015_alter_registration_locale.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0014_registration_footer"), ] @@ -14,7 +13,11 @@ class Migration(migrations.Migration): model_name="registration", name="locale", field=models.CharField( - choices=[("uk-ua", "український"), ("en-us", "English"), ("pl-pl", "Polskie")], + choices=[ + ("uk-ua", "український"), + ("en-us", "English"), + ("pl-pl", "Polskie"), + ], default="en-us", max_length=10, ), diff --git a/src/aurora/registration/migrations/0016_auto_20220328_1602.py b/src/aurora/registration/migrations/0016_auto_20220328_1602.py index 1652ef8b..fe473ee7 100644 --- a/src/aurora/registration/migrations/0016_auto_20220328_1602.py +++ b/src/aurora/registration/migrations/0016_auto_20220328_1602.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0015_alter_registration_locale"), ] diff --git a/src/aurora/registration/migrations/0017_auto_20220329_1713.py b/src/aurora/registration/migrations/0017_auto_20220329_1713.py index 532b1e58..9330ce19 100644 --- a/src/aurora/registration/migrations/0017_auto_20220329_1713.py +++ b/src/aurora/registration/migrations/0017_auto_20220329_1713.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0016_auto_20220328_1602"), ] @@ -19,7 +18,11 @@ class Migration(migrations.Migration): model_name="registration", name="locale", field=models.CharField( - choices=[("uk-ua", "український"), ("en-us", "English"), ("pl-pl", "Polskie")], + choices=[ + ("uk-ua", "український"), + ("en-us", "English"), + ("pl-pl", "Polskie"), + ], default="en-us", max_length=10, ), diff --git a/src/aurora/registration/migrations/0018_registration_encrypt_data.py b/src/aurora/registration/migrations/0018_registration_encrypt_data.py index a39310e0..ed0a10a4 100644 --- a/src/aurora/registration/migrations/0018_registration_encrypt_data.py +++ b/src/aurora/registration/migrations/0018_registration_encrypt_data.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0017_auto_20220329_1713"), ] diff --git a/src/aurora/registration/migrations/0019_alter_record_timestamp.py b/src/aurora/registration/migrations/0019_alter_record_timestamp.py index c6997cde..9bda3349 100644 --- a/src/aurora/registration/migrations/0019_alter_record_timestamp.py +++ b/src/aurora/registration/migrations/0019_alter_record_timestamp.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0018_registration_encrypt_data"), ] diff --git a/src/aurora/registration/migrations/0020_alter_record_timestamp.py b/src/aurora/registration/migrations/0020_alter_record_timestamp.py index 13ea9202..11887474 100644 --- a/src/aurora/registration/migrations/0020_alter_record_timestamp.py +++ 
b/src/aurora/registration/migrations/0020_alter_record_timestamp.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0019_alter_record_timestamp"), ] diff --git a/src/aurora/registration/migrations/0021_auto_20220402_0957.py b/src/aurora/registration/migrations/0021_auto_20220402_0957.py index 444705c6..3f7a9817 100644 --- a/src/aurora/registration/migrations/0021_auto_20220402_0957.py +++ b/src/aurora/registration/migrations/0021_auto_20220402_0957.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0020_alter_record_timestamp"), ] diff --git a/src/aurora/registration/migrations/0022_record_remote_ip.py b/src/aurora/registration/migrations/0022_record_remote_ip.py index 9a0ec80b..8238ece7 100644 --- a/src/aurora/registration/migrations/0022_record_remote_ip.py +++ b/src/aurora/registration/migrations/0022_record_remote_ip.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0021_auto_20220402_0957"), ] diff --git a/src/aurora/registration/migrations/0025_auto_20220422_1344.py b/src/aurora/registration/migrations/0025_auto_20220422_1344.py index 3f1c27a6..92133e85 100644 --- a/src/aurora/registration/migrations/0025_auto_20220422_1344.py +++ b/src/aurora/registration/migrations/0025_auto_20220422_1344.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0024_auto_20220412_1408"), ] diff --git a/src/aurora/registration/migrations/0026_auto_20220429_0708.py b/src/aurora/registration/migrations/0026_auto_20220429_0708.py index 10822c63..1e902157 100644 --- a/src/aurora/registration/migrations/0026_auto_20220429_0708.py +++ b/src/aurora/registration/migrations/0026_auto_20220429_0708.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0025_auto_20220422_1344"), ] diff --git a/src/aurora/registration/migrations/0027_alter_registration_slug.py b/src/aurora/registration/migrations/0027_alter_registration_slug.py index ada55c08..afbe2891 100644 --- a/src/aurora/registration/migrations/0027_alter_registration_slug.py +++ b/src/aurora/registration/migrations/0027_alter_registration_slug.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0026_auto_20220429_0708"), ] diff --git a/src/aurora/registration/migrations/0028_auto_20220510_0446.py b/src/aurora/registration/migrations/0028_auto_20220510_0446.py index d17a9a6a..f88af8a1 100644 --- a/src/aurora/registration/migrations/0028_auto_20220510_0446.py +++ b/src/aurora/registration/migrations/0028_auto_20220510_0446.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0027_alter_registration_slug"), ] @@ -13,7 +12,10 @@ class Migration(migrations.Migration): operations = [ migrations.AlterModelOptions( name="registration", - options={"get_latest_by": "start", "permissions": (("can_manage", "Can Manage"),)}, + options={ + "get_latest_by": "start", + "permissions": (("can_manage", "Can Manage"),), + }, ), migrations.AlterField( model_name="record", diff --git a/src/aurora/registration/migrations/0029_registration_client_validation.py b/src/aurora/registration/migrations/0029_registration_client_validation.py index 1c5867d4..5fc5df48 100644 --- a/src/aurora/registration/migrations/0029_registration_client_validation.py +++ b/src/aurora/registration/migrations/0029_registration_client_validation.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - 
dependencies = [ ("registration", "0028_auto_20220510_0446"), ] diff --git a/src/aurora/registration/migrations/0030_auto_20220511_1318.py b/src/aurora/registration/migrations/0030_auto_20220511_1318.py index ffc5df94..393843ca 100644 --- a/src/aurora/registration/migrations/0030_auto_20220511_1318.py +++ b/src/aurora/registration/migrations/0030_auto_20220511_1318.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0046_alter_validator_target"), ("registration", "0029_registration_client_validation"), diff --git a/src/aurora/registration/migrations/0031_auto_20220512_1125.py b/src/aurora/registration/migrations/0031_auto_20220512_1125.py index 29f41a41..9b32a263 100644 --- a/src/aurora/registration/migrations/0031_auto_20220512_1125.py +++ b/src/aurora/registration/migrations/0031_auto_20220512_1125.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0030_auto_20220511_1318"), ] diff --git a/src/aurora/registration/migrations/0032_auto_20220512_1744.py b/src/aurora/registration/migrations/0032_auto_20220512_1744.py index 82f7394b..f8270821 100644 --- a/src/aurora/registration/migrations/0032_auto_20220512_1744.py +++ b/src/aurora/registration/migrations/0032_auto_20220512_1744.py @@ -23,27 +23,12 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0046_alter_validator_target"), ("registration", "0031_auto_20220512_1125"), ] operations = [ - # migrations.RunSQL(SQL, REVERSE), - # migrations.RunSQL( - # 'CREATE INDEX CONCURRENTLY "registration_record_unique_field_5edac906" ' - # 'ON "registration_record" ("unique_field");' - # ), - # migrations.RunSQL( - # 'CREATE INDEX CONCURRENTLY "registration_record_unique_field_5edac906_like" ' - # 'ON "registration_record" ("unique_field" varchar_pattern_ops);' - # ), - # migrations.RunSQL( - # 'ALTER TABLE "registration_record" ' - # 'ADD CONSTRAINT "registration_record_registration_id_unique_field_2701a3c6_uniq" ' - # 'UNIQUE ("registration_id", "unique_field");' - # ), migrations.AddField( model_name="record", name="fields", @@ -63,14 +48,20 @@ class Migration(migrations.Migration): model_name="registration", name="unique_field", field=models.CharField( - blank=True, help_text="Form field to be used as unique key", max_length=255, null=True + blank=True, + help_text="Form field to be used as unique key", + max_length=255, + null=True, ), ), migrations.AlterField( model_name="registration", name="scripts", field=models.ManyToManyField( - blank=True, limit_choices_to={"target": "script"}, related_name="script_for", to="core.Validator" + blank=True, + limit_choices_to={"target": "script"}, + related_name="script_for", + to="core.Validator", ), ), migrations.AlterUniqueTogether( diff --git a/src/aurora/registration/migrations/0033_auto_20220604_1314.py b/src/aurora/registration/migrations/0033_auto_20220604_1314.py index 28e62e8a..e4a191a6 100644 --- a/src/aurora/registration/migrations/0033_auto_20220604_1314.py +++ b/src/aurora/registration/migrations/0033_auto_20220604_1314.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0046_alter_validator_target"), ("registration", "0032_auto_20220512_1744"), @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="registration", name="unique_field_error", field=models.CharField( - blank=True, help_text="Error message in case of duplicate 'unique_field'", max_length=255, null=True + blank=True, + help_text="Error message in case of duplicate 'unique_field'", + 
max_length=255, + null=True, ), ), ] diff --git a/src/aurora/registration/migrations/0034_auto_20220604_2141.py b/src/aurora/registration/migrations/0034_auto_20220604_2141.py index 56e96bfe..86ccab54 100644 --- a/src/aurora/registration/migrations/0034_auto_20220604_2141.py +++ b/src/aurora/registration/migrations/0034_auto_20220604_2141.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0046_alter_validator_target"), ("registration", "0033_auto_20220604_1314"), diff --git a/src/aurora/registration/migrations/0035_registration_unique_field_path.py b/src/aurora/registration/migrations/0035_registration_unique_field_path.py index 0aa50f3a..46bea925 100644 --- a/src/aurora/registration/migrations/0035_registration_unique_field_path.py +++ b/src/aurora/registration/migrations/0035_registration_unique_field_path.py @@ -13,7 +13,10 @@ class Migration(migrations.Migration): model_name="registration", name="unique_field_path", field=models.CharField( - blank=True, help_text="JMESPath expression to retrieve unique field", max_length=1000, null=True + blank=True, + help_text="JMESPath expression to retrieve unique field", + max_length=1000, + null=True, ), ), ] diff --git a/src/aurora/registration/migrations/0036_registration_archived.py b/src/aurora/registration/migrations/0036_registration_archived.py index 4132ba7d..697c4cbd 100644 --- a/src/aurora/registration/migrations/0036_registration_archived.py +++ b/src/aurora/registration/migrations/0036_registration_archived.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0035_registration_unique_field_path"), ] @@ -14,7 +13,8 @@ class Migration(migrations.Migration): model_name="registration", name="archived", field=models.BooleanField( - default=False, help_text="Archived/Terminated registration cannot be activated/reopened" + default=False, + help_text="Archived/Terminated registration cannot be activated/reopened", ), ), ] diff --git a/src/aurora/registration/migrations/0037_auto_20220819_0950.py b/src/aurora/registration/migrations/0037_auto_20220819_0950.py index 9e1a5b06..a2116156 100644 --- a/src/aurora/registration/migrations/0037_auto_20220819_0950.py +++ b/src/aurora/registration/migrations/0037_auto_20220819_0950.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0036_registration_archived"), ] @@ -12,6 +11,9 @@ class Migration(migrations.Migration): operations = [ migrations.AlterModelOptions( name="registration", - options={"get_latest_by": "start", "permissions": (("can_manage", "Can Manage Registration"),)}, + options={ + "get_latest_by": "start", + "permissions": (("can_manage", "Can Manage Registration"),), + }, ), ] diff --git a/src/aurora/registration/migrations/0038_alter_registration_unique_field.py b/src/aurora/registration/migrations/0038_alter_registration_unique_field.py index 3d0e58d0..675b26c5 100644 --- a/src/aurora/registration/migrations/0038_alter_registration_unique_field.py +++ b/src/aurora/registration/migrations/0038_alter_registration_unique_field.py @@ -13,7 +13,10 @@ class Migration(migrations.Migration): model_name="registration", name="unique_field", field=models.CharField( - blank=True, help_text="Form field to be used as unique key (DEPRECATED)", max_length=255, null=True + blank=True, + help_text="Form field to be used as unique key (DEPRECATED)", + max_length=255, + null=True, ), ), ] diff --git a/src/aurora/registration/migrations/0039_auto_20220930_1149.py 
b/src/aurora/registration/migrations/0039_auto_20220930_1149.py index 1162f1aa..3f51d19a 100644 --- a/src/aurora/registration/migrations/0039_auto_20220930_1149.py +++ b/src/aurora/registration/migrations/0039_auto_20220930_1149.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("auth", "0012_alter_user_first_name_max_length"), ("registration", "0038_alter_registration_unique_field"), @@ -15,7 +14,10 @@ class Migration(migrations.Migration): name="registration", options={ "get_latest_by": "start", - "permissions": (("manage", "Can Manage Registration"), ("register", "Can User Registration")), + "permissions": ( + ("manage", "Can Manage Registration"), + ("register", "Can User Registration"), + ), }, ), migrations.AddField( @@ -31,7 +33,9 @@ class Migration(migrations.Migration): model_name="registration", name="restrict_to_groups", field=models.ManyToManyField( - blank=True, help_text="Restrict access to the following groups", to="auth.Group" + blank=True, + help_text="Restrict access to the following groups", + to="auth.Group", ), ), ] diff --git a/src/aurora/registration/migrations/0040_auto_20221202_1011.py b/src/aurora/registration/migrations/0040_auto_20221202_1011.py index 0885afb3..d8031087 100644 --- a/src/aurora/registration/migrations/0040_auto_20221202_1011.py +++ b/src/aurora/registration/migrations/0040_auto_20221202_1011.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0039_auto_20220930_1149"), ] @@ -16,7 +15,10 @@ class Migration(migrations.Migration): name="registration", options={ "get_latest_by": "start", - "permissions": (("manage", "Can manage Registration"), ("register", "Can use Registration")), + "permissions": ( + ("manage", "Can manage Registration"), + ("register", "Can use Registration"), + ), }, ), migrations.AlterField( diff --git a/src/aurora/registration/migrations/0041_auto_20230123_1856.py b/src/aurora/registration/migrations/0041_auto_20230123_1856.py index 1b57f08a..20b83ce4 100644 --- a/src/aurora/registration/migrations/0041_auto_20230123_1856.py +++ b/src/aurora/registration/migrations/0041_auto_20230123_1856.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("flatpages", "0001_initial"), ("registration", "0040_auto_20221202_1011"), @@ -34,7 +33,10 @@ class Migration(migrations.Migration): model_name="registration", name="welcome_page", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="flatpages.flatpage" + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="flatpages.flatpage", ), ), migrations.AlterField( diff --git a/src/aurora/registration/migrations/0042_auto_20230125_0942.py b/src/aurora/registration/migrations/0042_auto_20230125_0942.py index 70d18d35..21bda445 100644 --- a/src/aurora/registration/migrations/0042_auto_20230125_0942.py +++ b/src/aurora/registration/migrations/0042_auto_20230125_0942.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0041_auto_20230123_1856"), ] diff --git a/src/aurora/registration/migrations/0043_remove_registration_unique_field.py b/src/aurora/registration/migrations/0043_remove_registration_unique_field.py index 6a29a424..ea1a3887 100644 --- a/src/aurora/registration/migrations/0043_remove_registration_unique_field.py +++ b/src/aurora/registration/migrations/0043_remove_registration_unique_field.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", 
"0042_auto_20230125_0942"), ] diff --git a/src/aurora/registration/migrations/0044_auto_20230126_1226.py b/src/aurora/registration/migrations/0044_auto_20230126_1226.py index da36c703..387497d7 100644 --- a/src/aurora/registration/migrations/0044_auto_20230126_1226.py +++ b/src/aurora/registration/migrations/0044_auto_20230126_1226.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("core", "0048_auto_20230126_1226"), ("registration", "0043_remove_registration_unique_field"), @@ -15,11 +14,19 @@ class Migration(migrations.Migration): migrations.AddField( model_name="registration", name="organization", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to="core.organization"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="core.organization", + ), ), migrations.AddField( model_name="registration", name="project", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to="core.project"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="core.project", + ), ), ] diff --git a/src/aurora/registration/migrations/0045_auto_20230208_1133.py b/src/aurora/registration/migrations/0045_auto_20230208_1133.py index d9f2a18e..61cc47de 100644 --- a/src/aurora/registration/migrations/0045_auto_20230208_1133.py +++ b/src/aurora/registration/migrations/0045_auto_20230208_1133.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0044_auto_20230126_1226"), ] diff --git a/src/aurora/registration/migrations/0046_auto_20230208_1611.py b/src/aurora/registration/migrations/0046_auto_20230208_1611.py index 404e8e22..8538a2f9 100644 --- a/src/aurora/registration/migrations/0046_auto_20230208_1611.py +++ b/src/aurora/registration/migrations/0046_auto_20230208_1611.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0045_auto_20230208_1133"), ] diff --git a/src/aurora/registration/migrations/0047_remove_registration_restrict_to_groups.py b/src/aurora/registration/migrations/0047_remove_registration_restrict_to_groups.py index aab0edc3..63113007 100644 --- a/src/aurora/registration/migrations/0047_remove_registration_restrict_to_groups.py +++ b/src/aurora/registration/migrations/0047_remove_registration_restrict_to_groups.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0046_auto_20230208_1611"), ] diff --git a/src/aurora/registration/migrations/0048_auto_20230307_1139.py b/src/aurora/registration/migrations/0048_auto_20230307_1139.py index f5b00c6e..1467c04a 100644 --- a/src/aurora/registration/migrations/0048_auto_20230307_1139.py +++ b/src/aurora/registration/migrations/0048_auto_20230307_1139.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("registration", "0047_remove_registration_restrict_to_groups"), ] diff --git a/src/aurora/registration/migrations/0049_record_registrar.py b/src/aurora/registration/migrations/0049_record_registrar.py index 8c0ede6f..3452595f 100644 --- a/src/aurora/registration/migrations/0049_record_registrar.py +++ b/src/aurora/registration/migrations/0049_record_registrar.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("registration", "0048_auto_20230307_1139"), @@ -17,7 +16,10 @@ class Migration(migrations.Migration): model_name="record", name="registrar", 
             field=models.ForeignKey(
-                blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+                blank=True,
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                to=settings.AUTH_USER_MODEL,
             ),
         ),
     ]
diff --git a/src/aurora/registration/migrations/0050_auto_20230327_1205.py b/src/aurora/registration/migrations/0050_auto_20230327_1205.py
index 8a082128..f09621c2 100644
--- a/src/aurora/registration/migrations/0050_auto_20230327_1205.py
+++ b/src/aurora/registration/migrations/0050_auto_20230327_1205.py
@@ -5,7 +5,6 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
         ("registration", "0049_record_registrar"),
     ]
diff --git a/src/aurora/registration/migrations/0051_auto_20231123_0605.py b/src/aurora/registration/migrations/0051_auto_20231123_0605.py
new file mode 100644
index 00000000..1cd5b8e6
--- /dev/null
+++ b/src/aurora/registration/migrations/0051_auto_20231123_0605.py
@@ -0,0 +1,38 @@
+# Generated by Django 3.2.18 on 2023-11-23 06:05
+
+import django.contrib.postgres.fields.citext
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("core", "0054_auto_20231123_0605"),
+        ("registration", "0050_auto_20230327_1205"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="registration",
+            name="name",
+            field=django.contrib.postgres.fields.citext.CICharField(max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="registration",
+            name="project",
+            field=models.ForeignKey(
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="registrations",
+                to="core.project",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="registration",
+            name="slug",
+            field=models.SlugField(blank=True, max_length=500, null=True),
+        ),
+        migrations.AlterUniqueTogether(
+            name="registration",
+            unique_together={("name", "project")},
+        ),
+    ]
diff --git a/src/aurora/registration/migrations/0052_alter_registration_slug.py b/src/aurora/registration/migrations/0052_alter_registration_slug.py
new file mode 100644
index 00000000..ab20e265
--- /dev/null
+++ b/src/aurora/registration/migrations/0052_alter_registration_slug.py
@@ -0,0 +1,17 @@
+# Generated by Django 3.2.23 on 2023-12-14 14:47
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("registration", "0051_auto_20231123_0605"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="registration",
+            name="slug",
+            field=models.SlugField(blank=True, max_length=500, null=True, unique=True),
+        ),
+    ]
diff --git a/src/aurora/registration/migrations/0053_alter_registration_name.py b/src/aurora/registration/migrations/0053_alter_registration_name.py
new file mode 100644
index 00000000..121c5c9c
--- /dev/null
+++ b/src/aurora/registration/migrations/0053_alter_registration_name.py
@@ -0,0 +1,17 @@
+# Generated by Django 4.2.10 on 2024-05-02 23:32
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("registration", "0052_alter_registration_slug"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="registration",
+            name="name",
+            field=models.CharField(db_collation="_", max_length=255),
+        ),
+    ]
diff --git a/src/aurora/registration/models.py b/src/aurora/registration/models.py
index 757c2f83..def5b3f7 100644
--- a/src/aurora/registration/models.py
+++ b/src/aurora/registration/models.py
@@ -2,18 +2,18 @@
 import json
 import logging
 
-import jmespath
-from concurrency.fields import
AutoIncVersionField -from Crypto.PublicKey import RSA from django.conf import settings from django.contrib.flatpages.models import FlatPage -from django.contrib.postgres.fields import CICharField from django.db import models from django.utils import timezone, translation from django.utils.functional import cached_property from django.utils.text import slugify from django.utils.translation import gettext as _ -from natural_keys import NaturalKeyModel + +import jmespath +from concurrency.fields import AutoIncVersionField +from Crypto.PublicKey import RSA +from natural_keys import NaturalKeyModel, NaturalKeyModelManager from strategy_field.fields import StrategyField from strategy_field.utils import fqn @@ -39,8 +39,13 @@ undefined = object() +class RegistrationManager(NaturalKeyModelManager): + def get_queryset(self): + return super().get_queryset().select_related("project", "project__organization") + + class Registration(NaturalKeyModel, I18NModel, models.Model): - _natural_key = ("slug",) + _natural_key = ("slug", "project") ADVANCED_DEFAULT_ATTRS = { "smart": { @@ -55,26 +60,35 @@ class Registration(NaturalKeyModel, I18NModel, models.Model): version = AutoIncVersionField() last_update_date = models.DateTimeField(auto_now=True) - name = CICharField(max_length=255, unique=True) + name = models.CharField(max_length=255, db_collation="_") title = models.CharField(max_length=500, blank=True, null=True) slug = models.SlugField(max_length=500, blank=True, null=True, unique=True) - project = models.ForeignKey(Project, null=True, on_delete=models.SET_NULL) + project = models.ForeignKey(Project, on_delete=models.CASCADE, related_name="registrations") flex_form = models.ForeignKey(FlexForm, on_delete=models.PROTECT) start = models.DateField(default=timezone.now, editable=True) end = models.DateField(blank=True, null=True) active = models.BooleanField(default=False) archived = models.BooleanField( - default=False, null=False, help_text=_("Archived/Terminated registration cannot be activated/reopened") + default=False, + null=False, + help_text=_("Archived/Terminated registration cannot be activated/reopened"), ) locale = models.CharField( - verbose_name="Default locale", max_length=10, choices=settings.LANGUAGES, default=settings.LANGUAGE_CODE + verbose_name="Default locale", + max_length=10, + choices=settings.LANGUAGES, + default=settings.LANGUAGE_CODE, ) dry_run = models.BooleanField(default=False) handler = StrategyField(registry=strategies, default=None, blank=True, null=True) show_in_homepage = models.BooleanField(default=False) welcome_page = models.ForeignKey(FlatPage, blank=True, null=True, on_delete=models.SET_NULL) - locales = ChoiceArrayField(models.CharField(max_length=10, choices=settings.LANGUAGES), blank=True, null=True) + locales = ChoiceArrayField( + models.CharField(max_length=10, choices=settings.LANGUAGES), + blank=True, + null=True, + ) intro = models.TextField(blank=True, null=True, default="") footer = models.TextField(blank=True, null=True, default="") client_validation = models.BooleanField(blank=True, null=False, default=False) @@ -88,14 +102,23 @@ class Registration(NaturalKeyModel, I18NModel, models.Model): ) scripts = models.ManyToManyField( - Validator, related_name="script_for", limit_choices_to={"target": Validator.SCRIPT}, blank=True + Validator, + related_name="script_for", + limit_choices_to={"target": Validator.SCRIPT}, + blank=True, ) unique_field_path = models.CharField( - max_length=1000, blank=True, null=True, help_text="JMESPath expression to retrieve unique 
field" + max_length=1000, + blank=True, + null=True, + help_text="JMESPath expression to retrieve unique field", ) unique_field_error = models.CharField( - max_length=255, blank=True, null=True, help_text="Error message in case of duplicate 'unique_field'" + max_length=255, + blank=True, + null=True, + help_text="Error message in case of duplicate 'unique_field'", ) public_key = models.TextField( blank=True, @@ -110,8 +133,11 @@ class Registration(NaturalKeyModel, I18NModel, models.Model): is_pwa_enabled = models.BooleanField(default=False) export_allowed = models.BooleanField(default=False) + objects = RegistrationManager() + class Meta: get_latest_by = "start" + unique_together = ("name", "project") permissions = ( ("can_manage_registration", _("Can manage Registration")), ("register", _("Can use Registration")), @@ -121,16 +147,34 @@ class Meta: ) ordering = ("name", "title") - @property - def media(self): - return VersionMedia(js=[script.get_script_url() for script in self.scripts.all()]) - def __str__(self): return self.name + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if not self.slug: + self.slug = slugify(self.name) + if not self.title: + self.title = self.name + dict_setdefault(self.advanced, self.ADVANCED_DEFAULT_ATTRS) + super().save(force_insert, force_update, using, update_fields) + def get_absolute_url(self): return cache_aware_reverse("register", args=[self.slug, self.version]) + @property + def media(self): + return VersionMedia(js=[script.get_script_url() for script in self.scripts.all()]) + + @cached_property + def organization(self): + return self.project.organization + + def is_running(self) -> bool: + today = timezone.now().today().date() + if not self.end: + return True + return self.start <= today <= self.end + def get_i18n_url(self, lang=None): translation.activate(language=lang or self.locale) return cache_aware_reverse("register", args=[self.slug, self.version]) @@ -138,16 +182,7 @@ def get_i18n_url(self, lang=None): def get_welcome_url(self): if self.welcome_page: return self.welcome_page.get_absolute_url() - else: - return self.get_absolute_url() - - def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - if not self.slug: - self.slug = slugify(self.name) - if not self.title: - self.title = self.name - dict_setdefault(self.advanced, self.ADVANCED_DEFAULT_ATTRS) - super().save(force_insert, force_update, using, update_fields) + return self.get_absolute_url() def setup_encryption_keys(self): key = RSA.generate(2048) @@ -167,48 +202,10 @@ def encrypt(self, value): def add_record(self, fields_data): if not self.handler: return SaveToDB(self).save(fields_data) + if not self.is_running(): + raise Exception("Registration is expired") return self.handler.save(fields_data) - # - # def _add_record(self, fields_data): - # fields, files = router.decompress(fields_data) - # crypter = Crypto() - # if self.public_key: - # kwargs = { - # # "storage": self.encrypt(fields_data), - # "files": self.encrypt(files), - # "fields": base64.b64encode(self.encrypt(fields)).decode(), - # } - # elif self.encrypt_data: - # kwargs = { - # # "storage": Crypto().encrypt(fields_data).encode(), - # "files": crypter.encrypt(files).encode(), - # "fields": crypter.encrypt(fields), - # } - # else: - # kwargs = { - # # "storage": safe_json(fields_data).encode(), - # "files": safe_json(files).encode(), - # "fields": jsonfy(fields), - # } - # if self.unique_field_path and not kwargs.get("unique_field", None): - # 
unique_value = self.get_unique_value(fields) - # kwargs["unique_field"] = unique_value - # if state.request and state.request.user.is_authenticated: - # registrar = state.request.user - # else: - # registrar = None - # kwargs.update( - # { - # "registrar": registrar, - # "size": total_size(fields) + total_size(files), - # "counters": fields_data.get("counters", {}), - # "index1": fields_data.get("index1", None), - # } - # ) - # - # return Record.objects.create(registration=self, **kwargs) - def get_unique_value(self, cleaned_data): unique_value = None if self.unique_field_path: @@ -231,11 +228,12 @@ def all_locales(self): @property def option_set_links(self): - links = [] - for field in self.flex_form.fields.all(): - if field.field_type == AjaxSelectField: - links.append(f"/en-us/options/{field.choices}/") # TODO: is en-us always valid? - return links + # TODO: is en-us always valid? + return [ + f"/en-us/options/{field.choices}/" + for field in self.flex_form.fields.all() + if field.field_type == AjaxSelectField + ] @cached_property def metadata(self): @@ -249,7 +247,7 @@ def _get_validator(owner): def _get_field_details(flex_field: FlexFormField): kwargs = flex_field.get_field_kwargs() return { - "type": fqn(flex_field.field_type), + "type": fqn(flex_field.field_type) if flex_field.field_type else None, "label": flex_field.label, "name": flex_field.name, "smart_attrs": kwargs["smart_attrs"], @@ -302,8 +300,6 @@ class Record(models.Model): ignored = models.BooleanField(default=False, blank=True, null=True) size = models.IntegerField(blank=True, null=True) counters = models.JSONField(blank=True, null=True) - # cleared = models.BooleanField(default=True, blank=True, - # help_text="Not cleared Records will not be fetched by HOPE") fields = models.JSONField(null=True, blank=True) files = models.BinaryField(null=True, blank=True) @@ -314,29 +310,29 @@ class Record(models.Model): is_offline = models.BooleanField(default=False) registrar = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.SET_NULL) - @property - def fields_data(self): - if self.is_offline and len(self.fields) > 12_000: - return "String too long to display..." - else: - return self.fields - class Meta: unique_together = ("registration", "unique_field") + def __str__(self): + return f"{self.registration} - {self.pk}" + + @property + def fields_data(self): + return "String too long to display..." 
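# --- Editor's note (illustrative sketch, not part of the diff) --------------
# Registration.unique_field_path (above) stores a JMESPath expression that
# get_unique_value() evaluates against the submitted form data; a duplicate
# result later trips the ("registration", "unique_field") unique_together
# constraint on Record. The expression and payload below are hypothetical
# examples, assuming the standard jmespath.search() API.
import jmespath

unique_field_path = "household.head_of_family.national_id"  # hypothetical expression
cleaned_data = {"household": {"head_of_family": {"national_id": "A-123"}}}

unique_value = jmespath.search(unique_field_path, cleaned_data)
assert unique_value == "A-123"
# A missing key simply yields None, i.e. "no unique value" for this record.
assert jmespath.search(unique_field_path, {}) is None
# -----------------------------------------------------------------------------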
if self.is_offline and len(self.fields) > 12_000 else self.fields + def decrypt(self, private_key=undefined, secret=undefined): if self.is_offline: fields = json.loads(decrypt_offline(self.fields, private_key)) return router.compress(fields, {}) - else: - if private_key != undefined: - files = json.loads(decrypt(self.files, private_key)) - fields = json.loads(decrypt(base64.b64decode(self.fields), private_key)) - return router.compress(fields, files) - elif secret != undefined: - files = json.loads(Crypto(secret).decrypt(self.files)) - fields = json.loads(Crypto(secret).decrypt(self.fields)) - return router.compress(fields, files) + if private_key != undefined: + files = json.loads(decrypt(self.files, private_key)) + fields = json.loads(decrypt(base64.b64decode(self.fields), private_key)) + return router.compress(fields, files) + if secret != undefined: + files = json.loads(Crypto(secret).decrypt(self.files)) + fields = json.loads(Crypto(secret).decrypt(self.fields)) + return router.compress(fields, files) + return None @property def unicef_id(self): @@ -346,20 +342,19 @@ def unicef_id(self): def data(self): if self.registration.public_key: return {"Forbidden": "Cannot access encrypted data"} - elif self.registration.encrypt_data: + if self.registration.encrypt_data: return self.decrypt(secret=None) - else: - files = {} - f = self.files - if f: - if not isinstance(f, bytes): - f = self.files.tobytes() - files = json.loads(f.decode()) - return merge(files, self.fields or {}) + files = {} + f = self.files + if f: + if not isinstance(f, bytes): + f = self.files.tobytes() + files = json.loads(f.decode()) + return merge(files, self.fields or {}) def merge(a, b, path=None, update=True): - """merges b into a""" + """Merge b into a.""" if path is None: path = [] for key in b: @@ -369,8 +364,13 @@ def merge(a, b, path=None, update=True): elif a[key] == b[key]: pass # same leaf value elif isinstance(a[key], list) and isinstance(b[key], list): - for idx, val in enumerate(b[key]): - a[key][idx] = merge(a[key][idx], b[key][idx], path + [str(key), str(idx)], update=update) + for idx, _ in enumerate(b[key]): + a[key][idx] = merge( + a[key][idx], + b[key][idx], + path + [str(key), str(idx)], + update=update, + ) elif update: a[key] = b[key] else: diff --git a/src/aurora/registration/storage.py b/src/aurora/registration/storage.py index 8ba2b8a0..70f69703 100644 --- a/src/aurora/registration/storage.py +++ b/src/aurora/registration/storage.py @@ -17,7 +17,7 @@ def clean_dict(d, filter_func): if filter_func(value): del d[key] continue - elif isinstance(value, dict): + if isinstance(value, dict): new_val = clean_dict(value, filter_func) elif isinstance(value, list) and value: if isinstance(value[0], dict): @@ -33,7 +33,10 @@ def clean_dict(d, filter_func): class Router: def compress(self, fields, files): - ff = apply_nested(files, lambda v, k: SimpleUploadedFile(k, v if isinstance(v, bytes) else v.encode())) + ff = apply_nested( + files, + lambda v, k: SimpleUploadedFile(k, v if isinstance(v, bytes) else v.encode()), + ) return merge_data(fields, ff) def decompress(self, data): diff --git a/src/aurora/registration/strategies.py b/src/aurora/registration/strategies.py index fd53ac85..628960d6 100644 --- a/src/aurora/registration/strategies.py +++ b/src/aurora/registration/strategies.py @@ -1,8 +1,9 @@ import base64 -from django.db.transaction import atomic +from django.db.transaction import atomic from django.shortcuts import render from django.urls import reverse + from strategy_field.registry import 
Registry
 
 from aurora.core.crypto import Crypto
 
@@ -29,19 +30,16 @@ def save(self, fields_data, **kwargs):
         crypter = Crypto()
         if self.registration.public_key:
             kwargs = {
-                # "storage": self.encrypt(fields_data),
                 "files": self.registration.encrypt(files),
                 "fields": base64.b64encode(self.registration.encrypt(fields)).decode(),
             }
         elif self.registration.encrypt_data:
             kwargs = {
-                # "storage": Crypto().encrypt(fields_data).encode(),
                 "files": crypter.encrypt(files).encode(),
                 "fields": crypter.encrypt(fields),
             }
         else:
             kwargs = {
-                # "storage": safe_json(fields_data).encode(),
                 "files": safe_json(files).encode(),
                 "fields": jsonfy(fields),
             }
@@ -118,12 +116,13 @@ def save(self, fields_data, **kwargs):
                 "counters": fields_data.get("counters", {}),
                 "index1": fields_data.get("index1", None),
                 "fields": fields,
-                # "files": files,
             }
         )
         record = Record(registration=self.registration, **kwargs)
 
         return render(
-            state.request, "registration/test_registration.html", {"record": record, "fields_data": fields_data}
+            state.request,
+            "registration/test_registration.html",
+            {"record": record, "fields_data": fields_data},
         )
 
diff --git a/src/aurora/registration/templates/registration/_staff_toolbar.html b/src/aurora/registration/templates/registration/_staff_toolbar.html
index a484084e..ad883f11 100644
--- a/src/aurora/registration/templates/registration/_staff_toolbar.html
+++ b/src/aurora/registration/templates/registration/_staff_toolbar.html
@@ -1,7 +1,7 @@
{% load static itrans %}{% if user.is_staff %}
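# --- Editor's note (illustrative sketch, not part of the diff) --------------
# Record.data (models.py, earlier in this diff) decodes the binary "files"
# blob and combines it with the JSON "fields" column via the merge() helper,
# which recursively merges its second argument into its first. The sample
# dictionaries below are hypothetical, and the import assumes a configured
# Django environment in which aurora.registration.models can be loaded.
from aurora.registration.models import merge

fields = {"name": "Maria", "household": {"size": 3}}
files = {"household": {"photo": "id_card.png"}}

# merge() updates `files` in place and returns the merged mapping, which is
# how Record.data combines decoded file metadata with the stored fields.
result = merge(files, fields)
assert result == {"name": "Maria", "household": {"photo": "id_card.png", "size": 3}}
# -----------------------------------------------------------------------------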