From ed07624981b19777dfe87df2d75827704e17cff2 Mon Sep 17 00:00:00 2001 From: Chris Adams Date: Tue, 15 Oct 2024 17:10:25 -0400 Subject: [PATCH] Use PyPI GitHub Action to publish releases (#181) * Use GitHub Action to publish releases and run tests in Python version matrices * Drop support for Python 2 * Use Ruff for formatting & linting Closes #176 --- .github/workflows/pypi-release.yml | 38 ++++++ .github/workflows/test.yml | 36 +++++ .github/workflows/tox.yml | 19 --- .pre-commit-config.yaml | 36 +++++ Dockerfile | 2 +- MANIFEST.in | 1 - Pipfile | 12 -- Pipfile.lock | 207 ----------------------------- README.rst | 15 +-- bagit.py | 95 ++++++------- bench.py | 2 +- pyproject.toml | 40 ++++++ setup.cfg | 17 --- setup.py | 24 ---- test.py | 98 +++++--------- tox.ini | 5 - 16 files changed, 230 insertions(+), 417 deletions(-) create mode 100644 .github/workflows/pypi-release.yml create mode 100644 .github/workflows/test.yml delete mode 100644 .github/workflows/tox.yml create mode 100644 .pre-commit-config.yaml delete mode 100644 Pipfile delete mode 100644 Pipfile.lock create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 tox.ini diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml new file mode 100644 index 00000000..aa85fc41 --- /dev/null +++ b/.github/workflows/pypi-release.yml @@ -0,0 +1,38 @@ +name: "PyPI releases" + +on: release + +jobs: + build_sdist: + name: Build Python source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Build sdist + run: pipx run build --sdist + + - uses: actions/upload-artifact@v3 + with: + path: dist/*.tar.gz + + pypi-publish: + name: Upload release to PyPI + if: github.event_name == 'release' && github.event.action == 'published' + needs: + - build_sdist + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/bagit + permissions: + id-token: write + steps: + - uses: actions/download-artifact@v3 + with: + # unpacks default artifact into dist/ + # if `name: artifact` is omitted, the action will create extra parent dir + name: artifact + path: dist + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..b37ba869 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,36 @@ +name: Test + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + ruff: # https://docs.astral.sh/ruff + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: pip install --user ruff + - run: ruff check --output-format=github + + test: + needs: ruff + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools wheel + pip install coverage + pip install --editable . 
+ - name: Run test + run: python -m unittest discover diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml deleted file mode 100644 index 326af833..00000000 --- a/.github/workflows/tox.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: tox -on: [push, pull_request] -jobs: - tox: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: | - 3.7 - 3.8 - 3.9 - 3.10 - 3.11 - - run: sudo apt-get install -qy gettext - - run: pip install --upgrade pip - - run: pip install tox - - run: tox diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..9f1dc327 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,36 @@ +exclude: ".*test-data.*" + +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.6.9 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-added-large-files + args: ["--maxkb=128"] + - id: check-ast + - id: check-byte-order-marker + - id: check-case-conflict + - id: check-docstring-first + - id: check-executables-have-shebangs + - id: check-json + - id: check-merge-conflict + - id: check-symlinks + - id: check-xml + - id: check-yaml + args: ["--unsafe"] + - id: debug-statements + - id: detect-aws-credentials + args: ["--allow-missing-credentials"] + - id: detect-private-key + - id: end-of-file-fixer + - id: mixed-line-ending + args: ["--fix=lf"] + - id: trailing-whitespace + - id: pretty-format-json + args: ["--autofix", "--no-sort-keys", "--indent=4"] diff --git a/Dockerfile b/Dockerfile index d37aedc2..491782cb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6 +FROM python:3.11 RUN useradd --user-group bagit-tester RUN install -d -o bagit-tester /bagit USER bagit-tester diff --git a/MANIFEST.in b/MANIFEST.in index 22e38442..db7a1998 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,7 +1,6 @@ prune test-data exclude .* exclude Dockerfile -exclude tox.ini exclude MANIFEST.in exclude test.py exclude bench.py diff --git a/Pipfile b/Pipfile deleted file mode 100644 index 00cd19b4..00000000 --- a/Pipfile +++ /dev/null @@ -1,12 +0,0 @@ -[[source]] -url = "https://pypi.python.org/simple" -verify_ssl = true -name = "pypi" - -[packages] - -[dev-packages] -tox = "*" -black = "*" -flake8 = "*" -isort = "*" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 3e66532e..00000000 --- a/Pipfile.lock +++ /dev/null @@ -1,207 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "38fd188c44d6e72a65d72600d96ce97f572aaa97dfb1c39b173d078d1e680172" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.python.org/simple", - "verify_ssl": true - } - ] - }, - "default": {}, - "develop": { - "black": { - "hashes": [ - "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f", - "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93", - "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11", - "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0", - "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9", - "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5", - "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213", - "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d", - 
"sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7", - "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837", - "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f", - "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395", - "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995", - "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f", - "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597", - "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959", - "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5", - "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb", - "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4", - "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7", - "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd", - "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==24.3.0" - }, - "cachetools": { - "hashes": [ - "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14", - "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4" - ], - "markers": "python_version ~= '3.7'", - "version": "==5.3.0" - }, - "chardet": { - "hashes": [ - "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5", - "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9" - ], - "markers": "python_version >= '3.7'", - "version": "==5.1.0" - }, - "click": { - "hashes": [ - "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", - "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.7" - }, - "colorama": { - "hashes": [ - "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", - "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", - "version": "==0.4.6" - }, - "distlib": { - "hashes": [ - "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46", - "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e" - ], - "version": "==0.3.6" - }, - "filelock": { - "hashes": [ - "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9", - "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718" - ], - "markers": "python_version >= '3.7'", - "version": "==3.12.0" - }, - "flake8": { - "hashes": [ - "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7", - "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181" - ], - "index": "pypi", - "version": "==6.0.0" - }, - "isort": { - "hashes": [ - "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504", - "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6" - ], - "index": "pypi", - "version": "==5.12.0" - }, - "mccabe": { - "hashes": [ - "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", - "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" - ], - "markers": "python_version >= '3.6'", - "version": "==0.7.0" - }, - "mypy-extensions": { - "hashes": [ - 
"sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", - "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.0" - }, - "packaging": { - "hashes": [ - "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", - "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" - ], - "markers": "python_version >= '3.7'", - "version": "==24.0" - }, - "pathspec": { - "hashes": [ - "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", - "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" - ], - "markers": "python_version >= '3.8'", - "version": "==0.12.1" - }, - "platformdirs": { - "hashes": [ - "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068", - "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768" - ], - "markers": "python_version >= '3.8'", - "version": "==4.2.0" - }, - "pluggy": { - "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" - }, - "pycodestyle": { - "hashes": [ - "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053", - "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610" - ], - "markers": "python_version >= '3.6'", - "version": "==2.10.0" - }, - "pyflakes": { - "hashes": [ - "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf", - "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd" - ], - "markers": "python_version >= '3.6'", - "version": "==3.0.1" - }, - "pyproject-api": { - "hashes": [ - "sha256:435f46547a9ff22cf4208ee274fca3e2869aeb062a4834adfc99a4dd64af3cf9", - "sha256:4698a3777c2e0f6b624f8a4599131e2a25376d90fe8d146d7ac74c67c6f97c43" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.1" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" - }, - "tox": { - "hashes": [ - "sha256:5a2eac5fb816779dfdf5cb00fecbc27eb0524e4626626bb1de84747b24cacc56", - "sha256:d25a2e6cb261adc489604fafd76cd689efeadfa79709965e965668d6d3f63046" - ], - "index": "pypi", - "version": "==4.5.1" - }, - "virtualenv": { - "hashes": [ - "sha256:278753c47aaef1a0f14e6db8a4c5e1e040e90aea654d0fc1dc7e0d8a42616cc3", - "sha256:48fd3b907b5149c5aab7c23d9790bea4cac6bc6b150af8635febc4cfeab1275a" - ], - "markers": "python_version >= '3.7'", - "version": "==20.22.0" - } - } -} diff --git a/README.rst b/README.rst index f3b805be..c109c832 100644 --- a/README.rst +++ b/README.rst @@ -16,7 +16,7 @@ project as needed or you can install globally with: pip install bagit -Python v2.7+ is required. +A supported version of Python 3 is required. 
Command Line Usage ------------------ @@ -226,20 +226,11 @@ Contributing to bagit-python development Running the tests ~~~~~~~~~~~~~~~~~ -You can quickly run the tests by having setuptools install dependencies: +You can quickly run the tests using the built-in unittest framework: :: - python setup.py test - -Once your code is working, you can use -`Tox `__ to run the tests with every -supported version of Python which you have installed on the local -system: - -:: - - tox + python -m unittest discover If you have Docker installed, you can run the tests under Linux inside a container: diff --git a/bagit.py b/bagit.py index 458fba8b..32cfb9be 100755 --- a/bagit.py +++ b/bagit.py @@ -1,8 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from __future__ import absolute_import, division, print_function, unicode_literals - import argparse import codecs import gettext @@ -42,10 +40,8 @@ def find_locale_dir(): TRANSLATION_CATALOG = gettext.translation( "bagit-python", localedir=find_locale_dir(), fallback=True ) -if sys.version_info < (3,): - _ = TRANSLATION_CATALOG.ugettext -else: - _ = TRANSLATION_CATALOG.gettext + +_ = TRANSLATION_CATALOG.gettext MODULE_NAME = "bagit" if __name__ == "__main__" else __name__ @@ -140,7 +136,7 @@ def find_locale_dir(): open_text_file = partial(codecs.open, encoding="utf-8", errors="strict") # This is the same as decoding the byte values in codecs.BOM: -UNICODE_BYTE_ORDER_MARK = "\uFEFF" +UNICODE_BYTE_ORDER_MARK = "\ufeff" def make_bag( @@ -422,8 +418,8 @@ def compare_manifests_with_fs(self): def compare_fetch_with_fs(self): """Compares the fetch entries with the files actually - in the payload, and returns a list of all the files - that still need to be fetched. + in the payload, and returns a list of all the files + that still need to be fetched. 
""" files_on_fs = set(self.payload_files()) @@ -449,7 +445,7 @@ def payload_files(self): yield rel_path def payload_entries(self): - """Return a dictionary of items """ + """Return a dictionary of items""" # Don't use dict comprehension (compatibility with Python < 2.7) return dict( (key, value) @@ -618,7 +614,9 @@ def is_valid(self, processes=1, fast=False, completeness_only=False): """ try: - self.validate(processes=processes, fast=fast, completeness_only=completeness_only) + self.validate( + processes=processes, fast=fast, completeness_only=completeness_only + ) except BagError: return False @@ -776,7 +774,10 @@ def validate_fetch(self): # each parsed url must resolve to a scheme and point to a netloc # if the scheme is file, netloc is not necessary - if not (all((parsed_url.scheme, parsed_url.netloc)) or parsed_url.scheme == "file"): + if not ( + all((parsed_url.scheme, parsed_url.netloc)) + or parsed_url.scheme == "file" + ): raise BagError(_("Malformed URL in fetch.txt: %s") % url) def _validate_contents(self, processes=1, fast=False, completeness_only=False): @@ -851,11 +852,11 @@ def _validate_completeness(self): only_in_manifests, only_on_fs = self.compare_manifests_with_fs() for path in only_in_manifests: e = FileMissing(path) - LOGGER.warning(force_unicode(e)) + LOGGER.warning(str(e)) errors.append(e) for path in only_on_fs: e = UnexpectedFile(path) - LOGGER.warning(force_unicode(e)) + LOGGER.warning(str(e)) errors.append(e) if errors: @@ -906,7 +907,7 @@ def _validate_entries(self, processes): e = ChecksumMismatch( rel_path, alg, stored_hash.lower(), computed_hash ) - LOGGER.warning(force_unicode(e)) + LOGGER.warning(str(e)) errors.append(e) if errors: @@ -963,7 +964,7 @@ def __init__(self, message, details=None): def __str__(self): if len(self.details) > 0: - details = "; ".join([force_unicode(e) for e in self.details]) + details = "; ".join([str(e) for e in self.details]) return "%s: %s" % (self.message, details) return self.message @@ -988,7 +989,7 @@ def __str__(self): return _( '%(path)s %(algorithm)s validation failed: expected="%(expected)s" found="%(found)s"' ) % { - "path": force_unicode(self.path), + "path": str(self.path), "algorithm": self.algorithm, "expected": self.expected, "found": self.found, @@ -997,9 +998,9 @@ def __str__(self): class FileMissing(ManifestErrorDetail): def __str__(self): - return _( - "%s exists in manifest but was not found on filesystem" - ) % force_unicode(self.path) + return _("%s exists in manifest but was not found on filesystem") % str( + self.path + ) class UnexpectedFile(ManifestErrorDetail): @@ -1034,22 +1035,10 @@ def posix_multiprocessing_worker_initializer(): # is consistency since the input value will be preserved: -def normalize_unicode_py3(s): - return unicodedata.normalize("NFC", s) - - -def normalize_unicode_py2(s): - if isinstance(s, str): - s = s.decode("utf-8") +def normalize_unicode(s): return unicodedata.normalize("NFC", s) -if sys.version_info > (3, 0): - normalize_unicode = normalize_unicode_py3 -else: - normalize_unicode = normalize_unicode_py2 - - def build_unicode_normalized_lookup_dict(filenames): """ Return a dictionary mapping unicode-normalized filenames to as-encoded @@ -1138,7 +1127,7 @@ def _calc_hashes(args): try: f_hashes = _calculate_file_hashes(full_path, f_hashers) except BagValidationError as e: - f_hashes = dict((alg, force_unicode(e)) for alg in f_hashers.keys()) + f_hashes = dict((alg, str(e)) for alg in f_hashers.keys()) return rel_path, f_hashes, hashes @@ -1161,7 +1150,7 @@ def 
_calculate_file_hashes(full_path, f_hashers): except (OSError, IOError) as e: raise BagValidationError( _("Could not read %(filename)s: %(error)s") - % {"filename": full_path, "error": force_unicode(e)} + % {"filename": full_path, "error": str(e)} ) return dict((alg, h.hexdigest()) for alg, h in f_hashers.items()) @@ -1187,11 +1176,11 @@ def _load_tag_file(tag_file_name, encoding="utf-8-sig"): def _parse_tags(tag_file): """Parses a tag file, according to RFC 2822. This - includes line folding, permitting extra-long - field values. + includes line folding, permitting extra-long + field values. - See http://www.faqs.org/rfcs/rfc2822.html for - more information. + See http://www.faqs.org/rfcs/rfc2822.html for + more information. """ tag_name = None @@ -1237,7 +1226,7 @@ def _make_tag_file(bag_info_path, bag_info): values = [values] for txt in values: # strip CR, LF and CRLF so they don't mess up the tag file - txt = re.sub(r"\n|\r|(\r\n)", "", force_unicode(txt)) + txt = re.sub(r"\n|\r|(\r\n)", "", str(txt)) f.write("%s: %s\n" % (h, txt)) @@ -1433,19 +1422,6 @@ def _decode_filename(s): return s -def force_unicode_py2(s): - """Reliably return a Unicode string given a possible unicode or byte string""" - if isinstance(s, str): - return s.decode("utf-8") - else: - return unicode(s) - - -if sys.version_info > (3, 0): - force_unicode = str -else: - force_unicode = force_unicode_py2 - # following code is used for command line program @@ -1531,7 +1507,10 @@ def _make_parser(): metadata_args = parser.add_argument_group(_("Optional Bag Metadata")) for header in STANDARD_BAG_INFO_HEADERS: metadata_args.add_argument( - "--%s" % header.lower(), type=str, action=BagHeaderAction, default=argparse.SUPPRESS + "--%s" % header.lower(), + type=str, + action=BagHeaderAction, + default=argparse.SUPPRESS, ) parser.add_argument( @@ -1574,7 +1553,9 @@ def main(): parser.error(_("--fast is only allowed as an option for --validate!")) if args.completeness_only and not args.validate: - parser.error(_("--completeness-only is only allowed as an option for --validate!")) + parser.error( + _("--completeness-only is only allowed as an option for --validate!") + ) _configure_logging(args) @@ -1593,7 +1574,9 @@ def main(): if args.fast: LOGGER.info(_("%s valid according to Payload-Oxum"), bag_dir) elif args.completeness_only: - LOGGER.info(_("%s is complete and valid according to Payload-Oxum"), bag_dir) + LOGGER.info( + _("%s is complete and valid according to Payload-Oxum"), bag_dir + ) else: LOGGER.info(_("%s is valid"), bag_dir) except BagError as e: diff --git a/bench.py b/bench.py index 37d14f55..06b4796e 100755 --- a/bench.py +++ b/bench.py @@ -2,7 +2,7 @@ """ This is a little benchmarking script to exercise bagit.make_bag and -bagit.validate using 1-8 parallel processes. It will download some images +bagit.validate using 1-8 parallel processes. It will download some images from NASA for use in bagging the first time it is run. 
""" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..7cd2d392 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,40 @@ +[build-system] +requires = ["setuptools>=64", "setuptools-scm>=8"] +build-backend = "setuptools.build_meta" + +[project] +name = "bagit" +dynamic = ["version"] +description = "Create and validate BagIt packages" +readme = {file = "README.rst", content-type = "text/x-rst"} +authors = [ + { name = "Ed Summers", email = "ehs@pobox.com" }, +] +classifiers = [ + "Intended Audience :: Developers", + "License :: Public Domain", + "Programming Language :: Python :: 3", + "Topic :: Communications :: File Sharing", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: System :: Filesystems", +] + +[project.urls] +Homepage = "https://libraryofcongress.github.io/bagit-python/" + +[tool] + +[tool.ruff] +target-version = "py38" + + +[tool.setuptools_scm] + +[tool.isort] +line_length = 110 +default_section = "THIRDPARTY" +known_first_party = "bagit" + +[tool.coverage.run] +branch = true +include = "bagit.py" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e9d5a64a..00000000 --- a/setup.cfg +++ /dev/null @@ -1,17 +0,0 @@ -[pycodestyle] -max-line-length=110 - -[flake8] -max-line-length=110 - -[isort] -line_length=110 -default_section=THIRDPARTY -known_first_party=bagit - -[coverage:run] -branch=true -include=bagit.py - -[bdist_wheel] -universal = 1 diff --git a/setup.py b/setup.py index 4bf8f5b6..15c91b43 100644 --- a/setup.py +++ b/setup.py @@ -7,21 +7,11 @@ import os import subprocess import sys -from codecs import open from setuptools import setup -if sys.version_info < (2, 7): - print("Python 2.7 or higher is required") - sys.exit(1) - description = "Create and validate BagIt packages" -with open("README.rst", encoding="utf-8") as readme: - long_description = readme.read() - -tests_require = ["mock", "coverage"] - def get_message_catalogs(): message_catalogs = [] @@ -58,28 +48,14 @@ def get_message_catalogs(): scripts=["bagit.py"], data_files=get_message_catalogs(), description=description, - long_description=long_description, platforms=["POSIX"], - test_suite="test", setup_requires=["setuptools_scm"], - install_requires=["importlib_metadata ; python_version < '3.8'"], - tests_require=tests_require, classifiers=[ "License :: Public Domain", "Intended Audience :: Developers", "Topic :: Communications :: File Sharing", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Filesystems", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.1", - "Programming Language :: Python :: 3.2", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", ], ) diff --git a/test.py b/test.py index b8acdd62..1e7c22fc 100644 --- a/test.py +++ b/test.py @@ -15,7 +15,7 @@ import unittest from os.path import join as j -import mock +from unittest import mock from io import StringIO import bagit @@ -266,9 +266,7 @@ def test_validation_completeness_error_details(self): def test_bom_in_bagit_txt(self): bag = bagit.make_bag(self.tmpdir) - BOM = codecs.BOM_UTF8 - if sys.version_info[0] >= 3: - BOM = BOM.decode("utf-8") + BOM = codecs.BOM_UTF8.decode("utf-8") with open(j(self.tmpdir, "bagit.txt"), "r") as bf: 
bagfile = BOM + bf.read() with open(j(self.tmpdir, "bagit.txt"), "w") as bf: @@ -1102,98 +1100,97 @@ def test_fetch_malformed_url(self): class TestCLI(SelfCleaningTestCase): - - @mock.patch('sys.stderr', new_callable=StringIO) + @mock.patch("sys.stderr", new_callable=StringIO) def test_directory_required(self, mock_stderr): testargs = ["bagit.py"] with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 2) self.assertIn( "error: the following arguments are required: directory", - mock_stderr.getvalue() + mock_stderr.getvalue(), ) - @mock.patch('sys.stderr', new_callable=StringIO) + @mock.patch("sys.stderr", new_callable=StringIO) def test_not_enough_processes(self, mock_stderr): testargs = ["bagit.py", "--processes", "0", self.tmpdir] with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 2) self.assertIn( "error: The number of processes must be greater than 0", - mock_stderr.getvalue() + mock_stderr.getvalue(), ) - @mock.patch('sys.stderr', new_callable=StringIO) + @mock.patch("sys.stderr", new_callable=StringIO) def test_fast_flag_without_validate(self, mock_stderr): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) testargs = ["bagit.py", "--fast", self.tmpdir] with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 2) self.assertIn( "error: --fast is only allowed as an option for --validate!", - mock_stderr.getvalue() + mock_stderr.getvalue(), ) def test_invalid_fast_validate(self): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) os.remove(j(self.tmpdir, "data", "loc", "2478433644_2839c5e8b8_o_d.jpg")) testargs = ["bagit.py", "--validate", "--completeness-only", self.tmpdir] with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 1) self.assertIn( "%s is invalid: Payload-Oxum validation failed." 
% self.tmpdir, - captured.records[0].getMessage() + captured.records[0].getMessage(), ) def test_valid_fast_validate(self): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) testargs = ["bagit.py", "--validate", "--fast", self.tmpdir] with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 0) self.assertEqual( "%s valid according to Payload-Oxum" % self.tmpdir, - captured.records[0].getMessage() + captured.records[0].getMessage(), ) - @mock.patch('sys.stderr', new_callable=StringIO) + @mock.patch("sys.stderr", new_callable=StringIO) def test_completeness_flag_without_validate(self, mock_stderr): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) testargs = ["bagit.py", "--completeness-only", self.tmpdir] with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 2) self.assertIn( "error: --completeness-only is only allowed as an option for --validate!", - mock_stderr.getvalue() + mock_stderr.getvalue(), ) def test_invalid_completeness_validate(self): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) old_path = j(self.tmpdir, "data", "README") new_path = j(self.tmpdir, "data", "extra_file") os.rename(old_path, new_path) @@ -1202,32 +1199,32 @@ def test_invalid_completeness_validate(self): with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 1) self.assertIn( "%s is invalid: Bag is incomplete" % self.tmpdir, - captured.records[-1].getMessage() + captured.records[-1].getMessage(), ) def test_valid_completeness_validate(self): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) testargs = ["bagit.py", "--validate", "--completeness-only", self.tmpdir] with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 0) self.assertEqual( "%s is complete and valid according to Payload-Oxum" % self.tmpdir, - captured.records[0].getMessage() + captured.records[0].getMessage(), ) def test_invalid_full_validate(self): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) readme = j(self.tmpdir, "data", "README") txt = slurp_text_file(readme) txt = "A" + txt[1:] @@ -1238,26 +1235,23 @@ def test_invalid_full_validate(self): with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 1) self.assertIn("Bag validation failed", captured.records[-1].getMessage()) def test_valid_full_validate(self): - bag = bagit.make_bag(self.tmpdir) + bagit.make_bag(self.tmpdir) testargs = ["bagit.py", "--validate", self.tmpdir] with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 0) - self.assertEqual( - "%s is valid" % self.tmpdir, - captured.records[-1].getMessage() - ) + self.assertEqual("%s is 
valid" % self.tmpdir, captured.records[-1].getMessage()) def test_failed_create_bag(self): os.chmod(self.tmpdir, 0) @@ -1266,13 +1260,13 @@ def test_failed_create_bag(self): with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() self.assertEqual(cm.exception.code, 1) self.assertIn( "Failed to create bag in %s" % self.tmpdir, - captured.records[-1].getMessage() + captured.records[-1].getMessage(), ) def test_create_bag(self): @@ -1280,7 +1274,7 @@ def test_create_bag(self): with self.assertLogs() as captured: with self.assertRaises(SystemExit) as cm: - with mock.patch.object(sys, 'argv', testargs): + with mock.patch.object(sys, "argv", testargs): bagit.main() for rec in captured.records: @@ -1289,25 +1283,5 @@ def test_create_bag(self): self.assertEqual(cm.exception.code, 0) -class TestUtils(unittest.TestCase): - def setUp(self): - super(TestUtils, self).setUp() - if sys.version_info >= (3,): - self.unicode_class = str - else: - self.unicode_class = unicode - - def test_force_unicode_str_to_unicode(self): - self.assertIsInstance(bagit.force_unicode("foobar"), self.unicode_class) - - def test_force_unicode_pass_through(self): - self.assertIsInstance(bagit.force_unicode("foobar"), self.unicode_class) - - def test_force_unicode_int(self): - self.assertIsInstance(bagit.force_unicode(1234), self.unicode_class) - - - - if __name__ == "__main__": unittest.main() diff --git a/tox.ini b/tox.ini deleted file mode 100644 index bfa0bd9e..00000000 --- a/tox.ini +++ /dev/null @@ -1,5 +0,0 @@ -[tox] -envlist = py27,py37,py38,py39,py310,py311 - -[testenv] -commands = python setup.py test