From 99d791a0915ff47c6904b7ce45ca4768d31eda6a Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 13:20:21 +0200 Subject: [PATCH 01/11] Fixed dead link at the bottom of the generated gallery examples. Fixes #97 --- docs/changelog.md | 4 ++++ src/mkdocs_gallery/gen_single.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/changelog.md b/docs/changelog.md index 790c47f8..9a23e965 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,9 @@ # Changelog +### 0.10.2 - Bugfixes + +- Fixed dead link at the bottom of the generated gallery examples. Fixes [#97](https://github.com/smarie/mkdocs-gallery/issues/97). + ### 0.10.1 - More flexible gallery folders - `examples` folder is not required to be in a subfolder of `docs` anymore. Fixes [#54](https://github.com/smarie/mkdocs-gallery/issues/54). PR [#92](https://github.com/smarie/mkdocs-gallery/pull/92) by [Louis-Pujol](https://github.com/Louis-Pujol). diff --git a/src/mkdocs_gallery/gen_single.py b/src/mkdocs_gallery/gen_single.py index 3967e97f..a65ef31a 100644 --- a/src/mkdocs_gallery/gen_single.py +++ b/src/mkdocs_gallery/gen_single.py @@ -145,7 +145,7 @@ def __exit__(self, type_, value, tb): # TODO only if html ? .. only:: html MKD_GLR_SIG = """\n -[Gallery generated by mkdocs-gallery](https://mkdocs-gallery.github.io){: .mkd-glr-signature } +[Gallery generated by mkdocs-gallery](https://smarie.github.io/mkdocs-gallery){: .mkd-glr-signature } """ # Header used to include raw html from data _repr_html_ From 4e5ef758033ff2af049c0c9549e958ce2b3e63f5 Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 13:27:14 +0200 Subject: [PATCH 02/11] Fixed compliance issue with `mkdocs-material`'s metadata declaration feature. 
Fixes #96 --- docs/changelog.md | 1 + src/mkdocs_gallery/gen_single.py | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.md b/docs/changelog.md index 9a23e965..a68c4bf9 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -3,6 +3,7 @@ ### 0.10.2 - Bugfixes - Fixed dead link at the bottom of the generated gallery examples. Fixes [#97](https://github.com/smarie/mkdocs-gallery/issues/97). +- Fixed compliance issue with `mkdocs-material`'s [metadata declaration feature](https://squidfunk.github.io/mkdocs-material/reference/#usage). Fixes [#96](https://github.com/smarie/mkdocs-gallery/issues/96). ### 0.10.1 - More flexible gallery folders diff --git a/src/mkdocs_gallery/gen_single.py b/src/mkdocs_gallery/gen_single.py index a65ef31a..08266c6c 100644 --- a/src/mkdocs_gallery/gen_single.py +++ b/src/mkdocs_gallery/gen_single.py @@ -458,13 +458,12 @@ def generate(gallery: GalleryBase, seen_backrefs: Set) -> Tuple[str, str, str, L all_thumbnail_entries.append(thumb_div) # Write the gallery summary index.md - index_md = f""" - -{readme_contents} + index_md = f"""{readme_contents} {"".join(all_thumbnail_entries)}
+ """ # Note: the "clear" is to disable floating elements again, now that the gallery section is over. From 8de7a824145a1617f20c12b113c138d2aa9e94fe Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 13:47:54 +0200 Subject: [PATCH 03/11] Fixed nox, GHA workflows, and added a comment --- .github/workflows/base.yml | 51 ++++++++++++++++++------- .github/workflows/updater.yml | 24 ++++++++++++ ci_tools/.pylintrc | 2 +- ci_tools/flake8-requirements.txt | 2 +- ci_tools/github_release.py | 10 ++--- ci_tools/nox_utils.py | 3 +- noxfile.py | 64 ++++++++++++++++---------------- src/mkdocs_gallery/gen_single.py | 2 + 8 files changed, 105 insertions(+), 53 deletions(-) create mode 100644 .github/workflows/updater.yml diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml index f2b993dd..5bf0211c 100644 --- a/.github/workflows/base.yml +++ b/.github/workflows/base.yml @@ -23,10 +23,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4.1.1 - name: Install python 3.9 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5.0.0 with: python-version: 3.9 architecture: x64 @@ -36,7 +36,7 @@ jobs: - name: List 'tests' nox sessions and required python versions id: set-matrix - run: echo "matrix=$(nox -s gha_list -- -s tests -v)" >> $GITHUB_OUTPUT + run: echo "::set-output name=matrix::$(nox --json -l -s tests -v)" outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} # save nox sessions list to outputs @@ -46,6 +46,8 @@ jobs: strategy: fail-fast: false matrix: + # see https://github.com/actions/setup-python/issues/544 + # os: [ ubuntu-20.04 ] os: [ ubuntu-latest, windows-latest ] # , macos-latest, windows-latest] # all nox sessions: manually > dynamically from previous job # nox_session: ["tests-2.7", "tests-3.7"] @@ -55,20 +57,41 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4.1.1 - name: Install python ${{ 
matrix.nox_session.python }} for tests - uses: actions/setup-python@v4 + if: ${{ ! contains(fromJson('["3.13"]'), matrix.nox_session.python ) }} + uses: MatteoH2O1999/setup-python@v3.2.1 # actions/setup-python@v5.0.0 + id: set-py with: python-version: ${{ matrix.nox_session.python }} architecture: x64 + allow-build: info + cache-build: true - - name: Install python 3.9 for nox - uses: actions/setup-python@v4 + - name: Install python ${{ matrix.nox_session.python }} for tests (3.13) + if: contains(fromJson('["3.13"]'), matrix.nox_session.python ) + uses: actions/setup-python@v5 + id: set-py-latest with: - python-version: 3.9 + # Include all versions including pre releases + # See https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#specifying-a-python-version + python-version: ${{ format('~{0}.0-alpha.0', matrix.nox_session.python) }} + architecture: x64 + allow-build: info + cache-build: true + + - name: Install python 3.12 for nox + uses: actions/setup-python@v5.0.0 + with: + python-version: 3.12 architecture: x64 + - name: pin virtualenv==20.15.1 in old python versions + # pinned to keep compatibility with old versions, see https://github.com/MatteoH2O1999/setup-python/issues/28#issuecomment-1745613621 + if: contains(fromJson('["2.7", "3.5", "3.6"]'), matrix.nox_session.python ) + run: sed -i "s/virtualenv/virtualenv==20.15.1/g" noxfile-requirements.txt + - name: Setup headless display uses: pyvista/setup-headless-display-action@v2 with: @@ -93,10 +116,10 @@ jobs: # if: github.event_name == 'pull_request' # steps: # - name: Checkout -# uses: actions/checkout@v2 +# uses: actions/checkout@v4.1.1 # # - name: Install python 3.9 for nox -# uses: actions/setup-python@v4 +# uses: actions/setup-python@v5.0.0 # with: # python-version: 3.9 # architecture: x64 @@ -118,12 +141,12 @@ jobs: run: echo "$GITHUB_CONTEXT" - name: Checkout with no depth - uses: actions/checkout@v2 + uses: actions/checkout@v4.1.1 with: fetch-depth: 0 # so that gh-deploy works - 
name: Install python 3.9 for nox - uses: actions/setup-python@v4 + uses: actions/setup-python@v5.0.0 with: python-version: 3.9 architecture: x64 @@ -135,7 +158,7 @@ jobs: # 1) retrieve the reports generated previously - name: Retrieve reports - uses: actions/download-artifact@master + uses: actions/download-artifact@v4.1.1 with: name: reports_dir path: ./docs/reports @@ -167,7 +190,7 @@ jobs: EOF - name: \[not on TAG\] Publish coverage report if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads') - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v4.0.1 with: files: ./docs/reports/coverage/coverage.xml diff --git a/.github/workflows/updater.yml b/.github/workflows/updater.yml new file mode 100644 index 00000000..7ccec7f8 --- /dev/null +++ b/.github/workflows/updater.yml @@ -0,0 +1,24 @@ +name: GitHub Actions Version Updater + +# Controls when the action will run. +on: + workflow_dispatch: + schedule: + # Automatically run on every first day of the month + - cron: '0 0 1 * *' + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4.1.1 + with: + # [Required] Access token with `workflow` scope. + token: ${{ secrets.WORKFLOW_SECRET }} + + - name: Run GitHub Actions Version Updater + uses: saadmk11/github-actions-version-updater@v0.8.1 + with: + # [Required] Access token with `workflow` scope. + token: ${{ secrets.WORKFLOW_SECRET }} diff --git a/ci_tools/.pylintrc b/ci_tools/.pylintrc index 93518894..85272735 100644 --- a/ci_tools/.pylintrc +++ b/ci_tools/.pylintrc @@ -5,7 +5,7 @@ # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). -# init-hook="import odsclient" +# init-hook="import mkdocs-gallery" # Add files or directories to the blacklist. They should be base names, not # paths. 
diff --git a/ci_tools/flake8-requirements.txt b/ci_tools/flake8-requirements.txt index e9628298..7281618a 100644 --- a/ci_tools/flake8-requirements.txt +++ b/ci_tools/flake8-requirements.txt @@ -2,7 +2,7 @@ setuptools_scm>=3,<4 flake8>=3.6,<4 flake8-html>=0.4,<1 flake8-bandit>=2.1.1,<3 -bandit<1.7.3 +bandit<1.7.3 # temporary until this is fixed https://github.com/tylerwince/flake8-bandit/issues/21 flake8-bugbear>=20.1.0,<21.0.0 flake8-docstrings>=1.5,<2 flake8-print>=3.1.1,<4 diff --git a/ci_tools/github_release.py b/ci_tools/github_release.py index 4a11040d..3d132986 100644 --- a/ci_tools/github_release.py +++ b/ci_tools/github_release.py @@ -114,11 +114,11 @@ def create_or_update_release(user, pwd, secret, repo_slug, changelog_file, doc_u # # # release.id # 13928525 # release.etag # 'W/"dfab7a13086d1b44fe290d5d04125124"' - # release.url # 'https://api.github.com/repos/smarie/python-odsclient/releases/13928525' - # release.html_url # 'https://github.com/smarie/python-odsclient/releases/tag/0.5.0' - # release.tarball_url # 'https://api.github.com/repos/smarie/python-odsclient/tarball/0.5.0' - # release.zipball_url # 'https://api.github.com/repos/smarie/python-odsclient/zipball/0.5.0' - # release.upload_url # 'https://uploads.github.com/repos/smarie/python-odsclient/releases/13928525/assets{?name,label}' + # release.url # 'https://api.github.com/repos/smarie/python-mkdocs-gallery/releases/13928525' + # release.html_url # 'https://github.com/smarie/python-mkdocs-gallery/releases/tag/0.5.0' + # release.tarball_url # 'https://api.github.com/repos/smarie/python-mkdocs-gallery/tarball/0.5.0' + # release.zipball_url # 'https://api.github.com/repos/smarie/python-mkdocs-gallery/zipball/0.5.0' + # release.upload_url # 'https://uploads.github.com/repos/smarie/python-mkdocs-gallery/releases/13928525/assets{?name,label}' if __name__ == '__main__': diff --git a/ci_tools/nox_utils.py b/ci_tools/nox_utils.py index 94f922cb..b7aebfa5 100644 --- a/ci_tools/nox_utils.py +++ 
b/ci_tools/nox_utils.py @@ -633,7 +633,8 @@ def _f_wrapper(**kwargs): except KeyError: # Skip this session, it is a dummy one nox_logger.warning( - "Skipping configuration, this is not supported in python version %r" % session.python + "Skipping configuration, %r is not meant to be executed in this session for python version %r" % + (grid_param if has_parameter else "this", session.python) ) return diff --git a/noxfile.py b/noxfile.py index 59a4acfc..7d4b6591 100644 --- a/noxfile.py +++ b/noxfile.py @@ -2,12 +2,12 @@ from itertools import product from json import dumps import logging -import os -import sys import nox # noqa +import os from packaging import version from pathlib import Path # noqa +import sys # add parent folder to python path so that we can import noxfile_utils.py # note that you need to "pip install -r noxfile-requiterements.txt" for this file to work. @@ -118,7 +118,11 @@ def tests(session: PowerSession, coverage, pkg_specs): # session.run2("conda list", env={"CONDA_PREFIX": str(conda_prefix), "CONDA_DEFAULT_ENV": session.get_session_id()}) # Fail if the assumed python version is not the actual one - session.run2("python ci_tools/check_python_version.py %s" % session.python) + session.run2(f"python ci_tools/check_python_version.py {session.python}") + + # check that it can be imported even from a different folder + # Important: do not surround the command into double quotes as in the shell ! 
+ # session.run('python', '-c', 'import os; os.chdir(\'./docs/\'); import %s' % pkg_name) # finally run all tests if not coverage: @@ -148,36 +152,33 @@ def tests(session: PowerSession, coverage, pkg_specs): versions_dct=pkg_specs) # --coverage + junit html reports - session.run2("coverage run --source src/{pkg_name} " - "-m pytest --cache-clear --junitxml='{test_xml}' --html='{test_html}' -v tests/" - "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2(f"coverage run --source src/{pkg_name} " + f"-m pytest --cache-clear " + f'--junitxml="{Folders.test_xml}" --html="{Folders.test_html}" ' + f"-v tests/") # -- use the doc generation for coverage if cannot_run_mayavi: - session.run2("coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs-no-mayavi.yml" - "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs-no-mayavi.yml") else: - session.run2("coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml" - "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml") # -- add a second build so that we can go through the caching/md5 side if cannot_run_mayavi: - session.run2("coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs-no-mayavi.yml" - "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs-no-mayavi.yml") else: - session.run2("coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml" - "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml") session.run2("coverage 
report") - session.run2("coverage xml -o '{covxml}'".format(covxml=Folders.coverage_xml)) - session.run2("coverage html -d '{dst}'".format(dst=Folders.coverage_reports)) + session.run2(f'coverage xml -o "{Folders.coverage_xml}"') + session.run2(f'coverage html -d "{Folders.coverage_reports}"') # delete this intermediate file, it is not needed anymore rm_file(Folders.coverage_intermediate_file) # --generates the badge for the test results and fail build if less than x% tests pass nox_logger.info("Generating badge for tests coverage") # Use our own package to generate the badge - session.run2("genbadge tests -i '%s' -o '%s' -t 100" % (Folders.test_xml, Folders.test_badge)) - session.run2("genbadge coverage -i '%s' -o '%s'" % (Folders.coverage_xml, Folders.coverage_badge)) + session.run2(f'genbadge tests -i "{Folders.test_xml}" -o "{Folders.test_badge}" -t 100') + session.run2(f'genbadge coverage -i "{Folders.coverage_xml}" -o "{Folders.coverage_badge}"') # Cleanup if os.path.exists("mkdocs-no-mayavi.yml"): os.remove("mkdocs-no-mayavi.yml") @@ -188,7 +189,6 @@ def flake8(session: PowerSession): """Launch flake8 qualimetry.""" session.install("-r", str(Folders.ci_tools / "flake8-requirements.txt")) - session.install("genbadge[flake8]") session.install2('.') rm_folder(Folders.flake8_reports) @@ -201,7 +201,7 @@ def flake8(session: PowerSession): session.run("flake8", pkg_name, "--exit-zero", "--format=html", "--htmldir", str(Folders.flake8_reports), "--statistics", "--tee", "--output-file", str(Folders.flake8_intermediate_file)) # generate our badge - session.run2("genbadge flake8 -i '%s' -o '%s'" % (Folders.flake8_intermediate_file, Folders.flake8_badge)) + session.run2(f'genbadge flake8 -i "{Folders.flake8_intermediate_file}" -o "{Folders.flake8_badge}"') rm_file(Folders.flake8_intermediate_file) @@ -231,10 +231,10 @@ def docs(session: PowerSession): session.install2('.') if session.posargs: - # use posargs instead of "serve" + # use posargs instead of "build" 
session.run2("mkdocs %s" % " ".join(session.posargs)) else: - session.run2("mkdocs serve -f mkdocs.yml") + session.run2("mkdocs build -f mkdocs.yml") @power_session(python=[PY39]) @@ -313,10 +313,9 @@ def my_scheme(version_): # create the github release session.install_reqs(phase="release", phase_reqs=["click", "PyGithub"]) - session.run2("python ci_tools/github_release.py -s {gh_token} " - "--repo-slug {gh_org}/{gh_repo} -cf ./docs/changelog.md " - "-d https://{gh_org}.github.io/{gh_repo}/changelog {tag}" - "".format(gh_token=gh_token, gh_org=gh_org, gh_repo=gh_repo, tag=current_tag)) + session.run2(f"python ci_tools/github_release.py -s {gh_token} " + f"--repo-slug {gh_org}/{gh_repo} -cf ./docs/changelog.md " + f"-d https://{gh_org}.github.io/{gh_repo}/changelog {current_tag}") @nox.session(python=False) @@ -345,15 +344,18 @@ def gha_list(session): session_func.parametrize except AttributeError: if additional_args.with_version: - sessions_list = [{"python": py, "session": "%s-%s" % (session_func.__name__, py)} for py in session_func.python] + sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}"} for py in session_func.python] else: - sessions_list = ["%s-%s" % (session_func.__name__, py) for py in session_func.python] + sessions_list = [f"{session_func.__name__}-{py}" for py in session_func.python] else: if additional_args.with_version: - sessions_list = [{"python": py, "session": "%s-%s(%s)" % (session_func.__name__, py, param)} - for py, param in product(session_func.python, session_func.parametrize)] + # sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}({param})"} + # for py, param in product(session_func.python, session_func.parametrize)] + # Hack to return the valid ones only, in order # TODO remove this hack when ENV is removed + sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}(env='{env}')"} + for py, env in ENVS.keys()] else: - sessions_list = ["%s-%s(%s)" % (session_func.__name__, py, 
param) + sessions_list = [f"{session_func.__name__}-{py}({param})" for py, param in product(session_func.python, session_func.parametrize)] # print the list so that it can be caught by GHA. diff --git a/src/mkdocs_gallery/gen_single.py b/src/mkdocs_gallery/gen_single.py index 08266c6c..416b8678 100644 --- a/src/mkdocs_gallery/gen_single.py +++ b/src/mkdocs_gallery/gen_single.py @@ -458,6 +458,8 @@ def generate(gallery: GalleryBase, seen_backrefs: Set) -> Tuple[str, str, str, L all_thumbnail_entries.append(thumb_div) # Write the gallery summary index.md + # Note: we write the HTML comment at the bottom instead of the top because having it at the top prevents html + # page metadata from mkdocs-material to be processed correctly. See GH#96 index_md = f"""{readme_contents} {"".join(all_thumbnail_entries)} From b4177df5bc348e25c2b86c7d72e61f267e18d5aa Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 14:01:35 +0200 Subject: [PATCH 04/11] Now using latest ci/nox template (from makefun) --- ci_tools/nox_utils.py | 938 ++++++------------------------------------ noxfile.py | 154 ++++--- 2 files changed, 193 insertions(+), 899 deletions(-) diff --git a/ci_tools/nox_utils.py b/ci_tools/nox_utils.py index b7aebfa5..3c4df0f5 100644 --- a/ci_tools/nox_utils.py +++ b/ci_tools/nox_utils.py @@ -1,349 +1,134 @@ -import asyncio -import logging -import os -import re -from shlex import split -import shutil -import subprocess -import sys from collections import namedtuple -from inspect import isfunction, signature -from itertools import product +import logging from pathlib import Path -from typing import IO, Any, Dict, Iterable, Mapping, Optional, Sequence, Tuple, Union +import shutil +import os + +from typing import Sequence, Dict, Union import nox -from makefun import add_signature_parameters, remove_signature_parameters, wraps -from nox.sessions import Session + nox_logger = logging.getLogger("nox") -PY27 = "2.7" -PY35 = "3.5" -PY36 = "3.6" -PY37 = "3.7" -PY38 = 
"3.8" -PY39 = "3.9" -PY310 = "3.10" -PY311 = "3.11" -PY312 = "3.12" +PY27, PY35, PY36, PY37, PY38, PY39, PY310, PY311, PY312, PY313 = ("2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", + "3.11", "3.12", "3.13") DONT_INSTALL = "dont_install" -def power_session( - func=None, - envs=None, - grid_param_name="env", - python=None, - py=None, - reuse_venv=None, - name=None, - venv_backend=None, - venv_params=None, - logsdir=None, - **kwargs +def install_reqs( + session, + # pre wired phases + setup=False, + install=False, + tests=False, + extras=(), + # custom phase + phase=None, + phase_reqs=None, + versions_dct=None ): - """A nox.session on steroids - - :param func: - :param envs: a dictionary {key: dict_of_params} where key is either the python version of a tuple (python version, - grid id) and all keys in the dict_of_params must be the same in all entries. The decorated function should - have one parameter for each of these keys, they will be injected with the value. - :param grid_param_name: when the key in `envs` is a tuple, this name will be the name of the generated parameter to - iterate through the various combinations for each python version. - :param python: - :param py: - :param reuse_venv: - :param name: - :param venv_backend: - :param venv_params: - :param logsdir: - :param kwargs: - :return: """ - if func is not None: - return power_session()(func) + A high-level helper to install requirements from the various project files + + - pyproject.toml "[build-system] requires" (if setup=True) + - setup.cfg "[options] setup_requires" (if setup=True) + - setup.cfg "[options] install_requires" (if install=True) + - setup.cfg "[options] test_requires" (if tests=True) + - setup.cfg "[options.extras_require] <...>" (if extras=(a tuple of extras)) + + Two additional mechanisms are provided in order to customize how packages are installed. 
+ + Conda packages + -------------- + If the session runs on a conda environment, you can add a [tool.conda] section to your pyproject.toml. This + section should contain a `conda_packages` entry containing the list of package names that should be installed + using conda instead of pip. + + ``` + [tool.conda] + # Declare that the following packages should be installed with conda instead of pip + # Note: this includes packages declared everywhere, here and in setup.cfg + conda_packages = [ + "setuptools", + "wheel", + "pip" + ] + ``` + + Version constraints + ------------------- + In addition to the version constraints in the pyproject.toml and setup.cfg, you can specify additional temporary + constraints with the `versions_dct` argument , for example if you know that this executes on a specific python + version that requires special care. + For this, simply pass a dictionary of {'pkg_name': 'pkg_constraint'} for example {"pip": ">10"}. + + """ + + # Read requirements from pyproject.toml + toml_setup_reqs, toml_use_conda_for = read_pyproject_toml() + if setup: + install_any(session, "pyproject.toml#build-system", toml_setup_reqs, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + # Read test requirements from setup.cfg + setup_cfg = read_setuptools_cfg() + if setup: + install_any(session, "setup.cfg#setup_requires", setup_cfg.setup_requires, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + if install: + install_any(session, "setup.cfg#install_requires", setup_cfg.install_requires, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + if tests: + install_any(session, "setup.cfg#tests_requires", setup_cfg.tests_requires, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + for extra in extras: + install_any(session, "setup.cfg#extras_require#%s" % extra, setup_cfg.extras_require[extra], + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + if phase is not None: + install_any(session, phase, 
phase_reqs, use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + +def install_any(session, + phase_name: str, + pkgs: Sequence[str], + use_conda_for: Sequence[str] = (), + versions_dct: Dict[str, str] = None, + ): + """Install the `pkgs` provided with `session.install(*pkgs)`, except for those present in `use_conda_for`""" + + # use the provided versions dictionary to update the versions + if versions_dct is None: + versions_dct = dict() + pkgs = [pkg + versions_dct.get(pkg, "") for pkg in pkgs if versions_dct.get(pkg, "") != DONT_INSTALL] + + nox_logger.debug("\nAbout to install *%s* requirements: %s.\n " + "Conda pkgs are %s" % (phase_name, pkgs, use_conda_for)) + + # install on conda... if the session uses conda backend + if not isinstance(session.virtualenv, nox.virtualenv.CondaEnv): + conda_pkgs = [] else: + conda_pkgs = [pkg_req for pkg_req in pkgs if any(get_req_pkg_name(pkg_req) == c for c in use_conda_for)] + if len(conda_pkgs) > 0: + nox_logger.info("[%s] Installing requirements with conda: %s" % (phase_name, conda_pkgs)) + session.conda_install(*conda_pkgs) - def combined_decorator(f): - # open a log file for the session, use it to stream the commands stdout and stderrs, - # and possibly inject the log file in the session function - if logsdir is not None: - f = with_logfile(logs_dir=logsdir)(f) - - # replace Session with PowerSession before it is passed to `with_logfile` - f = with_power_session(f) - - # decorate with @nox.session and possibly @nox.parametrize to create the grid - return nox_session_with_grid( - python=python, - py=py, - envs=envs, - reuse_venv=reuse_venv, - name=name, - grid_param_name=grid_param_name, - venv_backend=venv_backend, - venv_params=venv_params, - **kwargs - )(f) - - return combined_decorator - - -def with_power_session(f=None): - """A decorator to patch the session objects in order to add all methods from Session2""" - - if f is not None: - return with_power_session()(f) - - def _decorator(f): - @wraps(f) - def 
_f_wrapper(**kwargs): - # patch the session arg - PowerSession.patch(kwargs["session"]) - - # finally execute the session - return f(**kwargs) - - return _f_wrapper - - return _decorator - - -class PowerSession(Session): - """ - Our nox session improvements - """ - - # ------------ commandline runners ----------- - - def run2(self, command: Union[Iterable[str], str], logfile: Union[bool, str, Path] = True, **kwargs): - """ - An improvement of session.run that is able to - - - automatically split the provided command if it is a string - - use a log file - - :param command: - :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. - :param kwargs: - :return: - """ - if isinstance(command, str): - command = split(command) - - self.run(*command, logfile=logfile, **kwargs) - - def run_multi(self, cmds: str, logfile: Union[bool, str, Path] = True, **kwargs): - """ - An improvement of session.run that is able to - - - support multiline strings - - use a log file - - :param cmds: - :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. 
- :param kwargs: - :return: - """ - for cmdline in (line for line in cmds.splitlines() if line): - self.run2(cmdline, logfile=logfile, **kwargs) - - # ------------ requirements installers ----------- - - def install_reqs( - self, - # pre wired phases - setup=False, - install=False, - tests=False, - extras=(), - # custom phase - phase=None, - phase_reqs=None, - versions_dct=None, - ): - """ - A high-level helper to install requirements from the various project files - - - pyproject.toml "[build-system] requires" (if setup=True) - - setup.cfg "[options] setup_requires" (if setup=True) - - setup.cfg "[options] install_requires" (if install=True) - - setup.cfg "[options] test_requires" (if tests=True) - - setup.cfg "[options.extras_require] <...>" (if extras=(a tuple of extras)) - - Two additional mechanisms are provided in order to customize how packages are installed. - - Conda packages - -------------- - If the session runs on a conda environment, you can add a [tool.conda] section to your pyproject.toml. This - section should contain a `conda_packages` entry containing the list of package names that should be installed - using conda instead of pip. - - ``` - [tool.conda] - # Declare that the following packages should be installed with conda instead of pip - # Note: this includes packages declared everywhere, here and in setup.cfg - conda_packages = [ - "setuptools", - "wheel", - "pip" - ] - ``` - - Version constraints - ------------------- - In addition to the version constraints in the pyproject.toml and setup.cfg, you can specify additional temporary - constraints with the `versions_dct` argument , for example if you know that this executes on a specific python - version that requires special care. - For this, simply pass a dictionary of {'pkg_name': 'pkg_constraint'} for example {"pip": ">10"}. 
- - """ - - # Read requirements from pyproject.toml - toml_setup_reqs, toml_use_conda_for = read_pyproject_toml() - if setup: - self.install_any( - "pyproject.toml#build-system", - toml_setup_reqs, - use_conda_for=toml_use_conda_for, - versions_dct=versions_dct, - ) - - # Read test requirements from setup.cfg - setup_cfg = read_setuptools_cfg() - if setup: - self.install_any( - "setup.cfg#setup_requires", - setup_cfg.setup_requires, - use_conda_for=toml_use_conda_for, - versions_dct=versions_dct, - ) - if install: - self.install_any( - "setup.cfg#install_requires", - setup_cfg.install_requires, - use_conda_for=toml_use_conda_for, - versions_dct=versions_dct, - ) - if tests: - self.install_any( - "setup.cfg#tests_requires", - setup_cfg.tests_requires, - use_conda_for=toml_use_conda_for, - versions_dct=versions_dct, - ) - - for extra in extras: - self.install_any( - "setup.cfg#extras_require#%s" % extra, - setup_cfg.extras_require[extra], - use_conda_for=toml_use_conda_for, - versions_dct=versions_dct, - ) - - if phase is not None: - self.install_any( - phase, - phase_reqs, - use_conda_for=toml_use_conda_for, - versions_dct=versions_dct, - ) - - def uses_conda(self): - return isinstance(self.virtualenv, nox.virtualenv.CondaEnv) - - def install_any( - self, - phase_name: str, - pkgs: Sequence[str], - use_conda_for: Sequence[str] = (), - versions_dct: Dict[str, str] = None, - logfile: Union[bool, str, Path] = True, - ): - """Install the `pkgs` provided with `session.install(*pkgs)`, except for those present in `use_conda_for`""" - - nox_logger.debug( - "\nAbout to install *%s* requirements: %s.\n " "Conda pkgs are %s" % (phase_name, pkgs, use_conda_for) - ) - - # use the provided versions dictionary to update the versions - if versions_dct is None: - versions_dct = dict() - pkgs = [pkg + _get_suffix(pkg, versions_dct) for pkg in pkgs if versions_dct.get(pkg, "") != DONT_INSTALL] - - # install on conda... 
if the session uses conda backend - if not self.uses_conda(): - conda_pkgs = [] - else: - conda_pkgs = [pkg_req for pkg_req in pkgs if any(get_req_pkg_name(pkg_req) == c for c in use_conda_for)] - if len(conda_pkgs) > 0: - nox_logger.info("[%s] Installing requirements with conda: %s" % (phase_name, conda_pkgs)) - self.conda_install2(*conda_pkgs, logfile=logfile) - - pip_pkgs = [pkg_req for pkg_req in pkgs if pkg_req not in conda_pkgs] - # safety: make sure that nothing went modified or forgotten - assert set(conda_pkgs).union(set(pip_pkgs)) == set(pkgs) - if len(pip_pkgs) > 0: - nox_logger.info("[%s] Installing requirements with pip: %s" % (phase_name, pip_pkgs)) - self.install2(*pip_pkgs, logfile=logfile) - - def conda_install2(self, *conda_pkgs, logfile: Union[bool, str, Path] = True, **kwargs): - """ - Same as session.conda_install() but with support for `logfile`. - - :param conda_pkgs: - :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. - :return: - """ - return self.conda_install(*conda_pkgs, logfile=logfile, **kwargs) - - def install2(self, *pip_pkgs, logfile: Union[bool, str, Path] = True, **kwargs): - """ - Same as session.install() but with support for `logfile`. - - :param pip_pkgs: - :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. - :return: - """ - return self.install(*pip_pkgs, logfile=logfile, **kwargs) - - def get_session_id(self): - """Return the session id""" - if self.uses_conda(): - return Path(self.bin).name - else: - return Path(self.bin).parent.name - - @classmethod - def is_power_session(cls, session: Session): - return PowerSession.install2.__name__ in session.__dict__ - - @classmethod - def patch(cls, session: Session): - """ - Add all methods from this class to the provided object. - Note that we could instead have created a proper proxy... but complex for not a lot of benefit. 
- :param session: - :return: - """ - if not cls.is_power_session(session): - for m_name, m in cls.__dict__.items(): - if not isfunction(m): - continue - if m is cls.patch: - continue - if not hasattr(session, m_name): - setattr(session.__class__, m_name, m) - - return True + pip_pkgs = [pkg_req for pkg_req in pkgs if pkg_req not in conda_pkgs] + # safety: make sure that nothing went modified or forgotten + assert set(conda_pkgs).union(set(pip_pkgs)) == set(pkgs) + if len(pip_pkgs) > 0: + nox_logger.info("[%s] Installing requirements with pip: %s" % (phase_name, pip_pkgs)) + session.install(*pip_pkgs) # ------------- requirements related -def read_pyproject_toml(): +def read_pyproject_toml() -> Union[list, list]: """ Reads the `pyproject.toml` and returns @@ -352,23 +137,19 @@ def read_pyproject_toml(): """ if os.path.exists("pyproject.toml"): import toml - nox_logger.debug("\nA `pyproject.toml` file exists. Loading it.") pyproject = toml.load("pyproject.toml") - requires = pyproject["build-system"]["requires"] + requires = pyproject['build-system']['requires'] try: - conda_pkgs = pyproject["tool"]["conda"]["conda_packages"] + conda_pkgs = pyproject['tool']['conda']['conda_packages'] except KeyError: - conda_pkgs = dict() + conda_pkgs = [] return requires, conda_pkgs else: raise FileNotFoundError("No `pyproject.toml` file exists. 
No dependency will be installed ...") -SetupCfg = namedtuple( - "SetupCfg", - ("setup_requires", "install_requires", "tests_requires", "extras_require"), -) +SetupCfg = namedtuple('SetupCfg', ('setup_requires', 'install_requires', 'tests_requires', 'extras_require')) def read_setuptools_cfg(): @@ -377,15 +158,12 @@ def read_setuptools_cfg(): """ # see https://stackoverflow.com/a/30679041/7262247 from setuptools import Distribution - dist = Distribution() dist.parse_config_files() - return SetupCfg( - setup_requires=dist.setup_requires, - install_requires=dist.install_requires, - tests_requires=dist.tests_require, - extras_require=dist.extras_require, - ) + return SetupCfg(setup_requires=dist.setup_requires, + install_requires=dist.install_requires, + tests_requires=dist.tests_require, + extras_require=dist.extras_require) def get_req_pkg_name(r): @@ -395,283 +173,12 @@ def get_req_pkg_name(r): "funcsigs;python<'3.5'" will return "funcsigs" "pytest>=3" will return "pytest" """ - return r.replace("<", "=").replace(">", "=").replace(";", "=").split("=")[0] - - -# ------------- log related - - -def with_logfile( - logs_dir: Path, - logfile_arg: str = "logfile", - logfile_handler_arg: str = "logfilehandler", -): - """A decorator to inject a logfile""" - - def _decorator(f): - # check the signature of f - foo_sig = signature(f) - needs_logfile_injection = logfile_arg in foo_sig.parameters - needs_logfilehandler_injection = logfile_handler_arg in foo_sig.parameters - - # modify the exposed signature if needed - new_sig = None - if needs_logfile_injection: - new_sig = remove_signature_parameters(foo_sig, logfile_arg) - if needs_logfilehandler_injection: - new_sig = remove_signature_parameters(foo_sig, logfile_handler_arg) - - @wraps(f, new_sig=new_sig) - def _f_wrapper(**kwargs): - # find the session arg - session = kwargs["session"] # type: Session - - # add file handler to logger - logfile = logs_dir / ("%s.log" % PowerSession.get_session_id(session)) - error_logfile = 
logfile.with_name("ERROR_%s" % logfile.name) - success_logfile = logfile.with_name("SUCCESS_%s" % logfile.name) - # delete old files if present - for _f in (logfile, error_logfile, success_logfile): - if _f.exists(): - _f.unlink() - - # add a FileHandler to the logger - logfile_handler = log_to_file(logfile) - - # inject the log file / log file handler in the args: - if needs_logfile_injection: - kwargs[logfile_arg] = logfile - if needs_logfilehandler_injection: - kwargs[logfile_handler_arg] = logfile_handler - - # finally execute the session - try: - res = f(**kwargs) - except Exception as e: - # close and detach the file logger and rename as ERROR_....log - remove_file_logger() - logfile.rename(error_logfile) - raise e - else: - # close and detach the file logger and rename as SUCCESS_....log - remove_file_logger() - logfile.rename(success_logfile) - return res - - return _f_wrapper - - return _decorator - - -def log_to_file(file_path: Union[str, Path]): - """ - Closes and removes all file handlers from the nox logger, - and add a new one to the provided file path - - :param file_path: - :return: - """ - for h in list(nox_logger.handlers): - if isinstance(h, logging.FileHandler): - h.close() - nox_logger.removeHandler(h) - fh = logging.FileHandler(str(file_path), mode="w") - nox_logger.addHandler(fh) - return fh - - -def get_current_logfile_handler(): - """ - Returns the current unique log file handler (see `log_to_file`) - """ - for h in list(nox_logger.handlers): - if isinstance(h, logging.FileHandler): - return h - return None - - -def get_log_file_stream(): - """ - Returns the output stream for the current log file handler if any (see `log_to_file`) - """ - h = get_current_logfile_handler() - if h is not None: - return h.stream - return None - - -def remove_file_logger(): - """ - Closes and detaches the current logfile handler - :return: - """ - h = get_current_logfile_handler() - if h is not None: - h.close() - nox_logger.removeHandler(h) - - -# ------------ 
environment grid / parametrization related - - -def nox_session_with_grid( - python=None, - py=None, - envs: Mapping[str, Mapping[str, Any]] = None, - reuse_venv: Optional[bool] = None, - name: Optional[str] = None, - venv_backend: Any = None, - venv_params: Any = None, - grid_param_name: str = None, - **kwargs -): - """ - Since nox is not yet capable to define a build matrix with python and parameters mixed in the same parametrize - this implements it with a dirty hack. - To remove when https://github.com/theacodes/nox/pull/404 is complete - - :param envs: - :param env_python_key: - :return: - """ - if envs is None: - # Fast track default to @nox.session - return nox.session( - python=python, - py=py, - reuse_venv=reuse_venv, - name=name, - venv_backend=venv_backend, - venv_params=venv_params, - **kwargs - ) - else: - # Current limitation : session param names can be 'python' or 'py' only - if py is not None or python is not None: - raise ValueError( - "`python` session argument can not be provided both directly and through the " - "`env` with `session_param_names`" - ) - - # First examine the env and collect the parameter values for python - all_python = [] - all_params = [] - - env_contents_names = None - has_parameter = None - for env_id, env_params in envs.items(): - # consistency checks for the env_id - if has_parameter is None: - has_parameter = isinstance(env_id, tuple) - else: - if has_parameter != isinstance(env_id, tuple): - raise ValueError("All keys in env should be tuples, or not be tuples. Error for %r" % env_id) - - # retrieve python version and parameter - if not has_parameter: - if env_id not in all_python: - all_python.append(env_id) - else: - if len(env_id) != 2: - raise ValueError("Only a size-2 tuple can be used as env id") - py_id, param_id = env_id - if py_id not in all_python: - all_python.append(py_id) - if param_id not in all_params: - all_params.append(param_id) - - # consistency checks for the dict contents. 
- if env_contents_names is None: - env_contents_names = set(env_params.keys()) - else: - if env_contents_names != set(env_params.keys()): - raise ValueError( - "Environment %r parameters %r does not match parameters in the first environment: %r" - % (env_id, env_contents_names, set(env_params.keys())) - ) - - if has_parameter and not grid_param_name: - raise ValueError("You must provide a grid parameter name when the env keys are tuples.") - - def _decorator(f): - s_name = name if name is not None else f.__name__ - for pyv, _param in product(all_python, all_params): - if (pyv, _param) not in envs: - # create a dummy folder to avoid creating a useless venv ? - env_dir = Path(".nox") / ("%s-%s-%s-%s" % (s_name, pyv.replace(".", "-"), grid_param_name, _param)) - env_dir.mkdir(parents=True, exist_ok=True) - - # check the signature of f - foo_sig = signature(f) - missing = env_contents_names - set(foo_sig.parameters) - if len(missing) > 0: - raise ValueError("Session function %r does not contain environment parameter(s) %r" % (f.__name__, missing)) - - # modify the exposed signature if needed - new_sig = None - if len(env_contents_names) > 0: - new_sig = remove_signature_parameters(foo_sig, *env_contents_names) - - if has_parameter: - if grid_param_name in foo_sig.parameters: - raise ValueError("Internal error, this parameter has a reserved name: %r" % grid_param_name) - else: - new_sig = add_signature_parameters(new_sig, last=(grid_param_name,)) - - @wraps(f, new_sig=new_sig) - def _f_wrapper(**kwargs): - # find the session arg - session = kwargs["session"] # type: Session - - # get the versions to use for this environment - try: - if has_parameter: - grid_param = kwargs.pop(grid_param_name) - params_dct = envs[(session.python, grid_param)] - else: - params_dct = envs[session.python] - except KeyError: - # Skip this session, it is a dummy one - nox_logger.warning( - "Skipping configuration, %r is not meant to be executed in this session for python version %r" % - 
(grid_param if has_parameter else "this", session.python) - ) - return - - # inject the parameters in the args: - kwargs.update(params_dct) - - # finally execute the session - return f(**kwargs) - - if has_parameter: - _f_wrapper = nox.parametrize(grid_param_name, all_params)(_f_wrapper) - - _f_wrapper = nox.session( - python=all_python, - reuse_venv=reuse_venv, - name=name, - venv_backend=venv_backend, - venv_params=venv_params, - )(_f_wrapper) - return _f_wrapper - - return _decorator + return r.replace('<', '=').replace('>', '=').replace(';', '=').split("=")[0] # ----------- other goodies -def _get_suffix(pkg, versions_dct): - res = re.split("<|=|>|;", pkg.strip()) - prefix = "" - suffix = versions_dct.get(res[0], "") - if len(res) > 1 and len(suffix) > 0: - prefix = "," - - return prefix + suffix - - def rm_file(folder: Union[str, Path]): """Since on windows Path.unlink throws permission error sometimes, os.remove is preferred.""" if isinstance(folder, str): @@ -690,208 +197,3 @@ def rm_folder(folder: Union[str, Path]): if folder.exists(): shutil.rmtree(str(folder)) # Folders.site.unlink() --> possible PermissionError - - -# --- the patch of popen able to tee to logfile -- - - -import nox.popen as nox_popen_module - -orig_nox_popen = nox_popen_module.popen - - -class LogFileStreamCtx: - def __init__(self, logfile_stream): - self.logfile_stream = logfile_stream - - def __enter__(self): - return self.logfile_stream - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - -def patched_popen( - args: Sequence[str], - env: Mapping[str, str] = None, - silent: bool = False, - stdout: Union[int, IO] = None, - stderr: Union[int, IO] = subprocess.STDOUT, - logfile: Union[bool, str, Path] = None, - **kwargs -) -> Tuple[int, str]: - """ - Our patch of nox.popen.popen(). 
- - Current behaviour in `nox` is - - - when `silent=True` (default), process err is redirected to STDOUT and process out is captured in a PIPE and sent - to the logger (that does not displaying it :) ) - - - when `silent=False` (explicitly set, or when nox is run with verbose flag), process out and process err are both - redirected to STDOUT. - - Our implementation allows us to be a little more flexible: - - - if logfile is True or a string/Path, both process err and process out are both TEE-ed to logfile - - at the same time, the above behaviour remains. - - :param args: - :param env: - :param silent: - :param stdout: - :param stderr: - :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. - :return: - """ - logfile_stream = get_log_file_stream() - - if logfile in (None, False) or (logfile is True and logfile_stream is None): - # execute popen as usual - return orig_nox_popen(args=args, env=env, silent=silent, stdout=stdout, stderr=stderr, **kwargs) - - else: - # we'll need to tee the popen - if logfile is True: - ctx = LogFileStreamCtx - else: - ctx = lambda _: open(logfile, "a") - - with ctx(logfile_stream) as log_file_stream: - if silent and stdout is not None: - raise ValueError( - "Can not specify silent and stdout; passing a custom stdout always silences the commands output in " - "Nox's log." - ) - - shell = kwargs.get("shell", False) - if shell: - raise ValueError("Using shell=True is not yet supported with async streaming to log files") - - if stdout is not None or stderr is not subprocess.STDOUT: - raise ValueError("Using custom streams is not yet supported with async popen") - - # old way - # proc = subprocess.Popen(args, env=env, stdout=stdout, stderr=stderr) - - # New way: use asyncio to stream correctly - # Note: if keyboard interrupts do not work we should check - # https://mail.python.org/pipermail/async-sig/2017-August/000374.html maybe or the following threads. 
- - # define the async coroutines - async def async_popen(): - process = await asyncio.create_subprocess_exec( - *args, env=env, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, **kwargs - ) - - # bind the out and err streams - see https://stackoverflow.com/a/59041913/7262247 - # to mimic nox behaviour we only use a single capturing list - outlines = [] - await asyncio.wait( - [ - asyncio.create_task( - # process out is only redirected to STDOUT if not silent - _read_stream( - process.stdout, - lambda l: tee( - l, - sinklist=outlines, - sinkstream=log_file_stream, - quiet=silent, - verbosepipe=sys.stdout, - ), - ) - ), - # process err is always redirected to STDOUT (quiet=False) with a specific label - asyncio.create_task( - _read_stream( - process.stderr, - lambda l: tee( - l, - sinklist=outlines, - sinkstream=log_file_stream, - quiet=False, - verbosepipe=sys.stdout, - label="ERR:", - ), - ), - ), - ] - ) - return_code = await process.wait() # make sur the process has ended and retrieve its return code - return return_code, outlines - - # run the coroutine in the event loop - loop = asyncio.get_event_loop() - return_code, outlines = loop.run_until_complete(async_popen()) - - # just in case, flush everything - log_file_stream.flush() - sys.stdout.flush() - sys.stderr.flush() - - if silent: - # same behaviour as in nox: this will be passed to the logger, and it will act depending on verbose flag - out = "\n".join(outlines) if len(outlines) > 0 else "" - else: - # already written to stdout, no need to capture - out = "" - - return return_code, out - - -async def _read_stream(stream, callback): - """Helper async coroutine to read from a stream line by line and write them in callback""" - while True: - line = await stream.readline() - if line: - callback(line) - else: - break - - -def tee(linebytes, sinklist, sinkstream, verbosepipe, quiet, label=""): - """ - Helper routine to read a line, decode it, and append it to several sinks: - - - an optional 
`sinklist` list that will receive the decoded string in its "append" method - - an optional `sinkstream` stream that will receive the decoded string in its "writelines" method - - an optional `verbosepipe` stream that will receive only when quiet=False, the decoded string through a print - - append it to the sink, and if quiet=False, write it to pipe too. - """ - line = linebytes.decode("utf-8").rstrip() - - if sinklist is not None: - sinklist.append(line) - - if sinkstream is not None: - sinkstream.write(line + "\n") - sinkstream.flush() - - if not quiet and verbosepipe is not None: - print(label, line, file=verbosepipe) - verbosepipe.flush() - - -def patch_popen(): - nox_popen_module.popen = patched_popen - - from nox.command import popen - - if popen is not patched_popen: - nox.command.popen = patched_popen - - # change event loop on windows - # see https://stackoverflow.com/a/44639711/7262247 - # and https://docs.python.org/3/library/asyncio-platforms.html#subprocess-support-on-windows - if "win32" in sys.platform: - # Windows specific event-loop policy & cmd - asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) - # cmds = [['C:/Windows/system32/HOSTNAME.EXE']] - - # loop = asyncio.ProactorEventLoop() - # asyncio.set_event_loop(loop) - - -patch_popen() diff --git a/noxfile.py b/noxfile.py index 7d4b6591..99207e41 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,5 @@ import argparse -from itertools import product -from json import dumps +import json import logging import nox # noqa @@ -12,21 +11,13 @@ # add parent folder to python path so that we can import noxfile_utils.py # note that you need to "pip install -r noxfile-requiterements.txt" for this file to work. 
sys.path.append(str(Path(__file__).parent / "ci_tools")) -from nox_utils import PY37, PY38, PY39, PY310, PY311, power_session, rm_folder, rm_file, PowerSession # noqa +from nox_utils import PY37, PY38, PY39, PY310, PY311, install_reqs, rm_folder, rm_file # noqa pkg_name = "mkdocs_gallery" gh_org = "smarie" gh_repo = "mkdocs-gallery" -ENVS = { - PY311: {"coverage": False, "pkg_specs": {"pip": ">19"}}, - PY310: {"coverage": False, "pkg_specs": {"pip": ">19"}}, - PY39: {"coverage": False, "pkg_specs": {"pip": ">19"}}, - PY37: {"coverage": False, "pkg_specs": {"pip": ">19"}}, - # IMPORTANT: this should be last so that the folder docs/reports is not deleted afterwards - PY38: {"coverage": True, "pkg_specs": {"pip": ">19"}}, -} # set the default activated sessions, minimal for CI nox.options.sessions = ["tests", "flake8", "docs"] # , "docs", "gh_pages" @@ -63,8 +54,22 @@ class Folders: flake8_badge = flake8_reports / "flake8-badge.svg" -@power_session(envs=ENVS, logsdir=Folders.runlogs) -def tests(session: PowerSession, coverage, pkg_specs): +ENVS = { + PY311: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + PY310: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + PY39: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + PY37: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + # IMPORTANT: this should be last so that the folder docs/reports is not deleted afterwards + PY38: {"coverage": True, "pkg_specs": {"pip": ">19"}}, +} + +ENV_PARAMS = tuple((k, v["coverage"], v["pkg_specs"]) for k, v in ENVS.items()) +ENV_IDS = tuple(ENVS.keys()) + + +@nox.session +@nox.parametrize("python,coverage,pkg_specs", ENV_PARAMS, ids=ENV_IDS) +def tests(session, coverage, pkg_specs): """Run the test suite, including test reports generation and coverage reports. """ # As soon as this runs, we delete the target site and coverage files to avoid reporting wrong coverage/etc. 
@@ -87,7 +92,7 @@ def tests(session: PowerSession, coverage, pkg_specs): # session.run2("pip uninstall pytest-asyncio --yes") # install all requirements - session.install_reqs(setup=True, install=True, tests=True, versions_dct=pkg_specs) + install_reqs(session, setup=True, install=True, tests=True, versions_dct=pkg_specs) # Since our tests are currently limited, use our own doc generation as a test cannot_run_mayavi = version.parse(session.python) < version.parse(PY38) if cannot_run_mayavi: @@ -118,7 +123,7 @@ def tests(session: PowerSession, coverage, pkg_specs): # session.run2("conda list", env={"CONDA_PREFIX": str(conda_prefix), "CONDA_DEFAULT_ENV": session.get_session_id()}) # Fail if the assumed python version is not the actual one - session.run2(f"python ci_tools/check_python_version.py {session.python}") + session.run("python", "ci_tools/check_python_version.py", session.python) # check that it can be imported even from a different folder # Important: do not surround the command into double quotes as in the shell ! @@ -127,35 +132,35 @@ def tests(session: PowerSession, coverage, pkg_specs): # finally run all tests if not coverage: # install self so that it is recognized by pytest - session.install2('.', '--no-deps') + session.install(".", "--no-deps") # simple: pytest only - session.run2("python -m pytest --cache-clear -v tests/") + session.run("python", "-m", "pytest", "--cache-clear", "-v", "tests/") # since our tests are too limited, we use our own mkdocs build as additional test for now. 
if cannot_run_mayavi: - session.run2("python -m mkdocs build -f mkdocs-no-mayavi.yml") + session.run("python", "-m", "mkdocs", "build", "-f", "mkdocs-no-mayavi.yml") else: - session.run2("python -m mkdocs build -f mkdocs.yml") + session.run("python", "-m", "mkdocs", "build", "-f", "mkdocs.yml") # -- add a second build so that we can go through the caching/md5 side if cannot_run_mayavi: - session.run2("python -m mkdocs build -f mkdocs-no-mayavi.yml") + session.run("python", "-m", "mkdocs", "build", "-f", "mkdocs-no-mayavi.yml") else: - session.run2("python -m mkdocs build -f mkdocs.yml") + session.run("python", "-m", "mkdocs", "build", "-f", "mkdocs.yml") else: # install self in "develop" mode so that coverage can be measured - session.install2('-e', '.', '--no-deps') + session.install("-e", ".", "--no-deps") # coverage + junit html reports + badge generation - session.install_reqs(phase="coverage", + install_reqs(session, phase="coverage", phase_reqs=["coverage", "pytest-html", "genbadge[tests,coverage]"], versions_dct=pkg_specs) # --coverage + junit html reports - session.run2(f"coverage run --source src/{pkg_name} " - f"-m pytest --cache-clear " - f'--junitxml="{Folders.test_xml}" --html="{Folders.test_html}" ' - f"-v tests/") + session.run("coverage", "run", "--source", f"src/{pkg_name}", + "-m", "pytest", "--cache-clear", + f"--junitxml={Folders.test_xml}", f"--html={Folders.test_html}", + "-v", "tests/") # -- use the doc generation for coverage if cannot_run_mayavi: @@ -168,28 +173,28 @@ def tests(session: PowerSession, coverage, pkg_specs): else: session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml") - session.run2("coverage report") - session.run2(f'coverage xml -o "{Folders.coverage_xml}"') - session.run2(f'coverage html -d "{Folders.coverage_reports}"') + session.run("coverage", "report") + session.run("coverage", "xml", "-o", f"{Folders.coverage_xml}") + session.run("coverage", "html", "-d", 
f"{Folders.coverage_reports}") # delete this intermediate file, it is not needed anymore rm_file(Folders.coverage_intermediate_file) # --generates the badge for the test results and fail build if less than x% tests pass nox_logger.info("Generating badge for tests coverage") # Use our own package to generate the badge - session.run2(f'genbadge tests -i "{Folders.test_xml}" -o "{Folders.test_badge}" -t 100') - session.run2(f'genbadge coverage -i "{Folders.coverage_xml}" -o "{Folders.coverage_badge}"') + session.run("genbadge", "tests", "-i", f"{Folders.test_xml}", "-o", f"{Folders.test_badge}", "-t", "100") + session.run("genbadge", "coverage", "-i", f"{Folders.coverage_xml}", "-o", f"{Folders.coverage_badge}") # Cleanup if os.path.exists("mkdocs-no-mayavi.yml"): os.remove("mkdocs-no-mayavi.yml") -@power_session(python=PY39, logsdir=Folders.runlogs) -def flake8(session: PowerSession): +@nox.session(python=PY39) +def flake8(session): """Launch flake8 qualimetry.""" session.install("-r", str(Folders.ci_tools / "flake8-requirements.txt")) - session.install2('.') + session.install(".") rm_folder(Folders.flake8_reports) Folders.flake8_reports.mkdir(parents=True, exist_ok=True) @@ -201,7 +206,7 @@ def flake8(session: PowerSession): session.run("flake8", pkg_name, "--exit-zero", "--format=html", "--htmldir", str(Folders.flake8_reports), "--statistics", "--tee", "--output-file", str(Folders.flake8_intermediate_file)) # generate our badge - session.run2(f'genbadge flake8 -i "{Folders.flake8_intermediate_file}" -o "{Folders.flake8_badge}"') + session.run("genbadge", "flake8", "-i", f"{Folders.flake8_intermediate_file}", "-o", f"{Folders.flake8_badge}") rm_file(Folders.flake8_intermediate_file) @@ -222,41 +227,43 @@ def flake8(session: PowerSession): ] -@power_session(python=[PY39]) -def docs(session: PowerSession): - """Generates the doc and serves it on a local http server. 
Pass '-- build' to build statically instead.""" - session.install_reqs(phase="docs", phase_reqs=["mkdocs"] + MKDOCS_GALLERY_EXAMPLES_REQS + MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) +@nox.session(python=PY39) +def docs(session): + """Generates the doc. Pass '-- serve' to serve it on a local http server instead.""" + + install_reqs(session, phase="docs", phase_reqs=["mkdocs"] + MKDOCS_GALLERY_EXAMPLES_REQS + MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) # Install the plugin session.install2('.') if session.posargs: # use posargs instead of "build" - session.run2("mkdocs %s" % " ".join(session.posargs)) + session.run("mkdocs", *session.posargs) else: - session.run2("mkdocs build -f mkdocs.yml") + session.run("mkdocs", "build", "-f", "mkdocs.yml") -@power_session(python=[PY39]) -def publish(session: PowerSession): +@nox.session(python=PY39) +def publish(session): """Deploy the docs+reports on github pages. Note: this rebuilds the docs""" - session.install_reqs(phase="mkdocs", phase_reqs=["mkdocs"] + MKDOCS_GALLERY_EXAMPLES_REQS + MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) + + install_reqs(session, phase="mkdocs", phase_reqs=["mkdocs"] + MKDOCS_GALLERY_EXAMPLES_REQS + MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) # Install the plugin - session.install2('.') + session.install(".") # possibly rebuild the docs in a static way (mkdocs serve does not build locally) - session.run2("mkdocs build -f mkdocs.yml") + session.run("mkdocs", "build", "-f", "mkdocs.yml") # check that the doc has been generated with coverage if not Folders.site_reports.exists(): - raise ValueError("Test reports have not been built yet. Please run 'nox -s tests-3.7' first") + raise ValueError("Test reports have not been built yet. 
Please run 'nox -s tests(3.7)' first") # publish the docs - session.run2("mkdocs gh-deploy -f mkdocs.yml") + session.run("mkdocs", "gh-deploy", "-f", "mkdocs.yml") # publish the coverage - now in github actions only - # session.install_reqs(phase="codecov", phase_reqs=["codecov", "keyring"]) + # install_reqs(session, phase="codecov", phase_reqs=["codecov", "keyring"]) # # keyring set https://app.codecov.io/gh// token # import keyring # (note that this import is not from the session env but the main nox env) # codecov_token = keyring.get_password("https://app.codecov.io/gh//>", "token") @@ -264,10 +271,12 @@ def publish(session: PowerSession): # session.run2('codecov -t %s -f %s' % (codecov_token, Folders.coverage_xml)) -@power_session(python=[PY39]) -def release(session: PowerSession): +@nox.session(python=PY39) +def release(session): """Create a release on github corresponding to the latest tag""" + install_reqs(session, phase="setup.py#dist", phase_reqs=["setuptools_scm"]) + # Get current tag using setuptools_scm and make sure this is not a dirty/dev one from setuptools_scm import get_version # (note that this import is not from the session env but the main nox env) from setuptools_scm.version import guess_next_dev_version @@ -279,9 +288,8 @@ def my_scheme(version_): current_tag = get_version(".", version_scheme=my_scheme) # create the package - session.install_reqs(phase="setup.py#dist", phase_reqs=["setuptools_scm"]) rm_folder(Folders.dist) - session.run2("python setup.py sdist bdist_wheel") + session.run("python", "setup.py", "sdist", "bdist_wheel") if version[0].dirty or not version[0].exact: raise ValueError("You need to execute this action on a clean tag version with no local changes.") @@ -308,14 +316,14 @@ def my_scheme(version_): if publish_on_pypi: # keyring set https://upload.pypi.org/legacy/ your-username # keyring set https://test.pypi.org/legacy/ your-username - session.install_reqs(phase="PyPi", phase_reqs=["twine"]) - session.run2("twine upload 
dist/* -u smarie") # -r testpypi + install_reqs(session, phase="PyPi", phase_reqs=["twine"]) + session.run("twine", "upload", "dist/*", "-u", "smarie") # -r testpypi # create the github release - session.install_reqs(phase="release", phase_reqs=["click", "PyGithub"]) - session.run2(f"python ci_tools/github_release.py -s {gh_token} " - f"--repo-slug {gh_org}/{gh_repo} -cf ./docs/changelog.md " - f"-d https://{gh_org}.github.io/{gh_repo}/changelog {current_tag}") + install_reqs(session, phase="release", phase_reqs=["click", "PyGithub"]) + session.run("python", "ci_tools/github_release.py", "-s", gh_token, + "--repo-slug", f"{gh_org}/{gh_repo}", "-cf", "./docs/changelog.md", + "-d", f"https://{gh_org}.github.io/{gh_repo}/changelog", current_tag) @nox.session(python=False) @@ -336,32 +344,16 @@ def gha_list(session): ) additional_args = parser.parse_args(session.posargs) - # get the desired base session to generate the list for - session_func = globals()[additional_args.session] + # Now use --json CLI option + out = session.run("nox", "-l", "--json", "-s", "tests", external=True, silent=True) + sessions_list = [{"python": s["python"], "session": s["session"]} for s in json.loads(out)] - # list all sessions for this base session - try: - session_func.parametrize - except AttributeError: - if additional_args.with_version: - sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}"} for py in session_func.python] - else: - sessions_list = [f"{session_func.__name__}-{py}" for py in session_func.python] - else: - if additional_args.with_version: - # sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}({param})"} - # for py, param in product(session_func.python, session_func.parametrize)] - # Hack to return the valid ones only, in order # TODO remove this hack when ENV is removed - sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}(env='{env}')"} - for py, env in ENVS.keys()] - else: - sessions_list = 
[f"{session_func.__name__}-{py}({param})"
-                             for py, param in product(session_func.python, session_func.parametrize)]
+    # TODO filter

     # print the list so that it can be caught by GHA.
     # Note that json.dumps is optional since this is a list of string.
     # However it is to remind us that GHA expects a well-formatted json list of strings.
-    print(dumps(sessions_list))
+    print(json.dumps(sessions_list))


 # if __name__ == '__main__':

From 42a139b11a25b24503848ed18274ee971f89669d Mon Sep 17 00:00:00 2001
From: Sylvain MARIE
Date: Wed, 4 Sep 2024 14:27:00 +0200
Subject: [PATCH 05/11] Fixed CI

---
 noxfile-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/noxfile-requirements.txt b/noxfile-requirements.txt
index 6fb3e1c0..d496174d 100644
--- a/noxfile-requirements.txt
+++ b/noxfile-requirements.txt
@@ -1,5 +1,5 @@
 nox
 toml
-makefun
+setuptools<72  # later versions do not read 'tests_require' from setup.cfg anymore
 setuptools_scm  # used in 'release'
 keyring  # used in 'release'

From e449f16ff7b100976b4d39e9922325c47848b1f6 Mon Sep 17 00:00:00 2001
From: Sylvain MARIE
Date: Wed, 4 Sep 2024 14:31:14 +0200
Subject: [PATCH 06/11] Fixed CI once again

---
 noxfile-requirements.txt | 1 +
 noxfile.py               | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/noxfile-requirements.txt b/noxfile-requirements.txt
index d496174d..e32b00fe 100644
--- a/noxfile-requirements.txt
+++ b/noxfile-requirements.txt
@@ -1,3 +1,4 @@
+virtualenv
 nox
 toml
 setuptools<72  # later versions do not read 'tests_require' from setup.cfg anymore

diff --git a/noxfile.py b/noxfile.py
index 99207e41..2a99f6d6 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -96,9 +96,9 @@ def tests(session, coverage, pkg_specs):
     # Since our tests are currently limited, use our own doc generation as a test
     cannot_run_mayavi = version.parse(session.python) < version.parse(PY38)
     if cannot_run_mayavi:
-        session.install_reqs(phase="tests", phase_reqs=MKDOCS_GALLERY_EXAMPLES_REQS)
+        install_reqs(session, 
phase="tests", phase_reqs=MKDOCS_GALLERY_EXAMPLES_REQS) else: - session.install_reqs(phase="tests", phase_reqs=MKDOCS_GALLERY_EXAMPLES_REQS+MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) + install_reqs(session, phase="tests", phase_reqs=MKDOCS_GALLERY_EXAMPLES_REQS+MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) # Edit mkdocs config file with open("mkdocs.yml", "r") as f: From 064a38675a24accbbd68a9f31bf3e3501e2666f1 Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 14:38:47 +0200 Subject: [PATCH 07/11] Fixed GHA warning and trying to fix mayavi issue with configobj --- .github/workflows/base.yml | 2 +- noxfile.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml index 5bf0211c..e5f2a011 100644 --- a/.github/workflows/base.yml +++ b/.github/workflows/base.yml @@ -36,7 +36,7 @@ jobs: - name: List 'tests' nox sessions and required python versions id: set-matrix - run: echo "::set-output name=matrix::$(nox --json -l -s tests -v)" + run: echo "matrix=$(nox --json -l -s tests -v)" >> $GITHUB_OUTPUT outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} # save nox sessions list to outputs diff --git a/noxfile.py b/noxfile.py index 2a99f6d6..46f92297 100644 --- a/noxfile.py +++ b/noxfile.py @@ -223,7 +223,8 @@ def flake8(session): MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS = [ "PyQt5", # PyQt is required for the mayavi backend # Note: installing Mayavi from PyPi does not seem to work on GHA CI. 
- "git+https://github.com/enthought/mayavi.git", # we want mayavi>=4.7.4 when available due to https://github.com/enthought/mayavi/pull/1272 + #"git+https://github.com/enthought/mayavi.git", # we want mayavi>=4.7.4 when available due to https://github.com/enthought/mayavi/pull/1272 + "mayavi" ] From feb04ff764a195090354f84f973e23229a348d83 Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 14:45:15 +0200 Subject: [PATCH 08/11] Fixed CI --- noxfile.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/noxfile.py b/noxfile.py index 46f92297..c78a9b4b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -163,15 +163,17 @@ def tests(session, coverage, pkg_specs): "-v", "tests/") # -- use the doc generation for coverage + coverage_args = ("coverage", "run", "--append", "--source", f"src/{pkg_name}", "-m", "mkdocs", "build") if cannot_run_mayavi: - session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs-no-mayavi.yml") + session.run(*coverage_args, "-f", "mkdocs-no-mayavi.yml") else: - session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml") - # -- add a second build so that we can go through the caching/md5 side + session.run(*coverage_args, "-f", "mkdocs.yml") + + # -- add a second build so that we can go through the caching/md5 code if cannot_run_mayavi: - session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs-no-mayavi.yml") + session.run(*coverage_args, "-f", "mkdocs-no-mayavi.yml") else: - session.run2(f"coverage run --append --source src/{pkg_name} -m mkdocs build -f mkdocs.yml") + session.run(*coverage_args, "-f", "mkdocs.yml") session.run("coverage", "report") session.run("coverage", "xml", "-o", f"{Folders.coverage_xml}") From 95a63b2632e322eb33629282e3ad6d6200dc9a18 Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 14:50:22 +0200 Subject: [PATCH 09/11] Added explicit configobj installation to fix 
"ModuleNotFoundError: No module named 'configobj'" with mayavi --- noxfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/noxfile.py b/noxfile.py index c78a9b4b..58b7397d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -226,6 +226,7 @@ def flake8(session): "PyQt5", # PyQt is required for the mayavi backend # Note: installing Mayavi from PyPi does not seem to work on GHA CI. #"git+https://github.com/enthought/mayavi.git", # we want mayavi>=4.7.4 when available due to https://github.com/enthought/mayavi/pull/1272 + "configobj", "mayavi" ] From 98d899ce19ba4094f025a6b8635108560d7c7ea9 Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 14:59:33 +0200 Subject: [PATCH 10/11] Still trying to fix the mayavi issue --- noxfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index 58b7397d..402861cf 100644 --- a/noxfile.py +++ b/noxfile.py @@ -227,6 +227,7 @@ def flake8(session): # Note: installing Mayavi from PyPi does not seem to work on GHA CI. 
#"git+https://github.com/enthought/mayavi.git", # we want mayavi>=4.7.4 when available due to https://github.com/enthought/mayavi/pull/1272 "configobj", + "numpy<2", "mayavi" ] @@ -238,7 +239,7 @@ def docs(session): install_reqs(session, phase="docs", phase_reqs=["mkdocs"] + MKDOCS_GALLERY_EXAMPLES_REQS + MKDOCS_GALLERY_EXAMPLES_MAYAVI_REQS) # Install the plugin - session.install2('.') + session.install('.') if session.posargs: # use posargs instead of "build" From 7cd239b08ae447a3c4b6df3f794d90a8ce91b171 Mon Sep 17 00:00:00 2001 From: Sylvain MARIE Date: Wed, 4 Sep 2024 15:18:40 +0200 Subject: [PATCH 11/11] Modified the readme to explicitly show the metadata customization usage --- examples/README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/examples/README.md b/examples/README.md index 29a635bb..f908900d 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,3 +1,7 @@ +--- +title: Custom page title, see mkdocs-material reference +--- + # Gallery of Examples This page consists of the 'General example' gallery and a sub-gallery,