From 18915ba68556e0db6cf707f2267fd5ab40f768ce Mon Sep 17 00:00:00 2001 From: James Westby Date: Mon, 15 Feb 2016 17:03:30 +0000 Subject: [PATCH] Add the admin directory to lint checks. --- admin/acceptance.py | 2 +- admin/aws.py | 6 +- admin/client.py | 10 ++-- admin/cluster_add_nodes.py | 2 +- admin/cluster_setup.py | 2 +- admin/flaky.py | 3 +- admin/homebrew.py | 14 +++-- admin/installer/__init__.py | 4 +- admin/installer/cloudformation.py | 2 + admin/packaging.py | 55 ++++++++++--------- admin/release.py | 35 ++++++------ admin/test/test_acceptance.py | 13 +++-- admin/test/test_cluster_setup.py | 6 +- admin/test/test_homebrew.py | 3 +- admin/test/test_images.py | 10 ++-- admin/test/test_packaging.py | 10 ++-- admin/test/test_release.py | 91 +++++++++++++++++++------------ admin/yum.py | 6 +- build.yaml | 4 +- tox.ini | 4 +- 20 files changed, 159 insertions(+), 123 deletions(-) diff --git a/admin/acceptance.py b/admin/acceptance.py index 8ee18b3df0..c1933d7214 100644 --- a/admin/acceptance.py +++ b/admin/acceptance.py @@ -939,7 +939,7 @@ class CommonOptions(Options): lambda option_value: _validate_version_option( option_name=u'flocker-version', option_value=option_value - )], + )], ['build-server', None, 'http://build.clusterhq.com/', 'Base URL of build server for package downloads'], ['number-of-nodes', None, diff --git a/admin/aws.py b/admin/aws.py index 5aa033262c..d9dec95f77 100644 --- a/admin/aws.py +++ b/admin/aws.py @@ -310,14 +310,14 @@ def perform_upload_s3_key_recursively(dispatcher, intent): """ See :class:`UploadToS3Recursively`. """ - for file in intent.files: - path = intent.source_path.preauthChild(file) + for child in intent.files: + path = intent.source_path.preauthChild(child) if path.isfile(): yield Effect( UploadToS3( source_path=intent.source_path, target_bucket=intent.target_bucket, - target_key="%s/%s" % (intent.target_key, file), + target_key="%s/%s" % (intent.target_key, child), file=path, )) diff --git a/admin/client.py b/admin/client.py index 375b48ab99..3f7b597887 100644 --- a/admin/client.py +++ b/admin/client.py @@ -95,17 +95,17 @@ def script(self): return self._script -def make_script_file(dir, effects): +def make_script_file(directory, effects): """ Create a shell script file from a sequence of effects. - :param bytes dir: The directory in which to create the script. + :param bytes directory: The directory in which to create the script. :param Effect effects: An effect which contains the commands, typically a Sequence containing multiple commands. :return: The base filename of the script. """ builder = ScriptBuilder(effects) - fd, filename = tempfile.mkstemp(dir=dir, text=True) + fd, filename = tempfile.mkstemp(dir=directory, text=True) os.write(fd, builder.script()) os.close(fd) os.chmod(filename, 0555) @@ -201,8 +201,8 @@ class RunOptions(Options): ['distribution', None, None, 'The target distribution. ' 'One of {}. 
With --pip, one of {}'.format( - ', '.join(PACKAGED_CLIENT_DISTRIBUTIONS), - ', '.join(PIP_DISTRIBUTIONS))], + ', '.join(PACKAGED_CLIENT_DISTRIBUTIONS), + ', '.join(PIP_DISTRIBUTIONS))], ['branch', None, None, 'Branch to grab packages from'], ['flocker-version', None, None, 'Flocker version to install'], ['build-server', None, 'http://build.clusterhq.com/', diff --git a/admin/cluster_add_nodes.py b/admin/cluster_add_nodes.py index d447d7dd04..058f08a3e0 100644 --- a/admin/cluster_add_nodes.py +++ b/admin/cluster_add_nodes.py @@ -5,7 +5,7 @@ import sys -from eliot import FileDestination, add_destination +from eliot import FileDestination from twisted.internet.defer import DeferredList, inlineCallbacks from twisted.python.usage import UsageError diff --git a/admin/cluster_setup.py b/admin/cluster_setup.py index a4a00fa364..483e904718 100644 --- a/admin/cluster_setup.py +++ b/admin/cluster_setup.py @@ -10,7 +10,7 @@ from itertools import repeat from pipes import quote as shell_quote -from eliot import FileDestination, add_destination, write_failure +from eliot import FileDestination, write_failure from pyrsistent import pvector from txeffect import perform diff --git a/admin/flaky.py b/admin/flaky.py index ea8508b2b3..5c7d81226c 100644 --- a/admin/flaky.py +++ b/admin/flaky.py @@ -91,7 +91,8 @@ def report_test_tree(output, flaky_tests): """ reporter = TreeReporter(output) for (test, flaky) in flaky_tests: - new_test = clone_test_with_new_id(test, '{}({})'.format(test.id(), ', '.join(flaky.jira_keys))) + new_test = clone_test_with_new_id( + test, '{}({})'.format(test.id(), ', '.join(flaky.jira_keys))) reporter.startTest(new_test) reporter.addSuccess(new_test) reporter.stopTest(new_test) diff --git a/admin/homebrew.py b/admin/homebrew.py index 599331ecfe..d063ad4c46 100755 --- a/admin/homebrew.py +++ b/admin/homebrew.py @@ -178,7 +178,8 @@ def install ENV["LDFLAGS"] = "-L#{{opt_prefix}}/openssl/lib" ENV["CFLAGS"] = "-I#{{opt_prefix}}/openssl/include" - ENV.prepend_create_path "PYTHONPATH", "#{{libexec}}/vendor/lib/python2.7/site-packages" + ENV.prepend_create_path "PYTHONPATH", "#{{libexec}}/vendor/lib/python2.7/\ +site-packages" %w[{dependencies}].each do |r| resource(r).stage do system "python", *Language::Python.setup_install_args(libexec/"vendor") @@ -197,11 +198,11 @@ def install end end """.format( - sdist_url=sdist_url, - sha1=sha1, - class_name=class_name, - resources=format_resource_stanzas(resources), - dependencies=u' '.join(dependencies)) + sdist_url=sdist_url, + sha1=sha1, + class_name=class_name, + resources=format_resource_stanzas(resources), + dependencies=u' '.join(dependencies)) class HomebrewOptions(Options): @@ -255,5 +256,6 @@ def main(args, base_path, top_level): if __name__ == "__main__": + # pylint: disable=relative-import from _preamble import TOPLEVEL, BASEPATH main(sys.argv[1:], top_level=TOPLEVEL, base_path=BASEPATH) diff --git a/admin/installer/__init__.py b/admin/installer/__init__.py index 41bbb206ce..e93286c7dc 100644 --- a/admin/installer/__init__.py +++ b/admin/installer/__init__.py @@ -14,6 +14,6 @@ __all__ = [ "publish_installer_images_main", - MIN_CLUSTER_SIZE, MAX_CLUSTER_SIZE, - InvalidClusterSizeException + 'MIN_CLUSTER_SIZE', 'MAX_CLUSTER_SIZE', + 'InvalidClusterSizeException' ] diff --git a/admin/installer/cloudformation.py b/admin/installer/cloudformation.py index 8e3aa6f774..917edc2ef8 100644 --- a/admin/installer/cloudformation.py +++ b/admin/installer/cloudformation.py @@ -34,9 +34,11 @@ import troposphere.ec2 as ec2 from troposphere.cloudformation 
import WaitConditionHandle, WaitCondition +# pylint: disable=relative-import from _cloudformation_helper import ( MIN_CLUSTER_SIZE, MAX_CLUSTER_SIZE, InvalidClusterSizeException ) +# pylint: enable=relative-import DEFAULT_CLUSTER_SIZE = MIN_CLUSTER_SIZE NODE_CONFIGURATION_TIMEOUT = u"900" diff --git a/admin/packaging.py b/admin/packaging.py index 5a09d3846f..02e9b767e5 100644 --- a/admin/packaging.py +++ b/admin/packaging.py @@ -87,7 +87,7 @@ def _get_current_distribution(klass): """ :return: A ``Distribution`` representing the current platform. """ - name, version, id = ( + name, version, _ = ( platform.linux_distribution(full_distribution_name=False)) return klass(name=name.lower(), version=version) @@ -663,13 +663,17 @@ def __str__(self): # Cryptography hazmat bindings 'package-installs-python-pycache-dir opt/flocker/lib/python2.7/site-packages/cryptography/hazmat/bindings/__pycache__/', # noqa - # files included by netaddr - we put the whole python we need in the flocker package, and lint complains. - # See: + # files included by netaddr - we put the whole python we need in the + # flocker package, and lint complains. See: # https://lintian.debian.org/tags/package-installs-ieee-data.html - "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/netaddr/eui/iab.idx", - "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/netaddr/eui/iab.txt", - "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/netaddr/eui/oui.idx", - "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/netaddr/eui/oui.txt", + "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/" + "netaddr/eui/iab.idx", + "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/" + "netaddr/eui/iab.txt", + "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/" + "netaddr/eui/oui.idx", + "package-installs-ieee-data opt/flocker/lib/python2.7/site-packages/" + "netaddr/eui/oui.txt", "package-contains-timestamped-gzip", "systemd-service-file-outside-lib", ), @@ -753,37 +757,37 @@ class PACKAGE(Values): class PACKAGE_PYTHON(PACKAGE): DESCRIPTION = ValueConstant( - 'Flocker: a container data volume manager for your ' - + 'Dockerized applications\n' - + fill('This is the base package of scripts and libraries.', 79) + 'Flocker: a container data volume manager for your ' + + 'Dockerized applications\n' + + fill('This is the base package of scripts and libraries.', 79) ) class PACKAGE_CLI(PACKAGE): DESCRIPTION = ValueConstant( - 'Flocker: a container data volume manager for your' - + ' Dockerized applications\n' - + fill('This meta-package contains links to the Flocker client ' - 'utilities, and has only the dependencies required to run ' - 'those tools', 79) + 'Flocker: a container data volume manager for your' + + ' Dockerized applications\n' + + fill('This meta-package contains links to the Flocker client ' + 'utilities, and has only the dependencies required to run ' + 'those tools', 79) ) class PACKAGE_NODE(PACKAGE): DESCRIPTION = ValueConstant( - 'Flocker: a container data volume manager for your' - + ' Dockerized applications\n' - + fill('This meta-package contains links to the Flocker node ' - 'utilities, and has only the dependencies required to run ' - 'those tools', 79) + 'Flocker: a container data volume manager for your' + + ' Dockerized applications\n' + + fill('This meta-package contains links to the Flocker node ' + 'utilities, and has only the dependencies required to run ' + 'those tools', 79) ) class 
PACKAGE_DOCKER_PLUGIN(PACKAGE): DESCRIPTION = ValueConstant( - 'Flocker volume plugin for Docker\n' - + fill('This meta-package contains links to the Flocker Docker plugin', - 79) + 'Flocker volume plugin for Docker\n' + + fill('This meta-package contains links to the Flocker Docker plugin', + 79) ) @@ -1089,8 +1093,8 @@ def run(self): ['--volume', '%s:%s' % (host.path, container.path)]) result = call( - ['docker', 'run', '--rm'] - + volume_options + [self.tag] + self.command) + ['docker', 'run', '--rm'] + + volume_options + [self.tag] + self.command) if result: raise SystemExit(result) @@ -1114,6 +1118,7 @@ def available_distributions(flocker_source_path): if path.isdir() and path.child(b"Dockerfile").exists() ) + def build_in_docker(destination_path, distribution, top_level, package_uri): """ Build a flocker package for a given ``distribution`` inside a clean docker diff --git a/admin/release.py b/admin/release.py index de27a1663e..87257d8344 100644 --- a/admin/release.py +++ b/admin/release.py @@ -8,7 +8,6 @@ https://clusterhq.atlassian.net/browse/FLOC-397 """ -import json import yaml import os import sys @@ -18,7 +17,6 @@ from datetime import datetime from subprocess import check_call, check_output from sys import platform as _platform -from urllib import quote from boto.s3.website import RoutingRules, RoutingRule @@ -211,15 +209,16 @@ def publish_docs(flocker_version, doc_version, environment, routing_config): :param bytes doc_version: The version to publish the documentation as. :param Environments environment: The environment to publish the documentation to. - :param dict routing_config: The loaded routing configuration (see ``parse_routing_rules`` for details). + :param dict routing_config: The loaded routing configuration (see + ``parse_routing_rules`` for details). :raises NotARelease: Raised if trying to publish to a version that isn't a release. :raises NotTagged: Raised if publishing to production and the version being published version isn't tagged. """ - if not (is_release(doc_version) - or is_weekly_release(doc_version) - or is_pre_release(doc_version)): + if not (is_release(doc_version) or + is_weekly_release(doc_version) or + is_pre_release(doc_version)): raise NotARelease() if environment == Environments.PRODUCTION: @@ -312,7 +311,8 @@ def publish_docs(flocker_version, doc_version, environment, routing_config): yield Effect(UpdateS3RoutingRules( bucket=configuration.documentation_bucket, - routing_rules=parse_routing_rules(routing_config, configuration.cloudfront_cname), + routing_rules=parse_routing_rules( + routing_config, configuration.cloudfront_cname), )) # Invalidate all the changed paths in cloudfront. 
@@ -369,7 +369,7 @@ def publish_docs_main(args, base_path, top_level): redirects_path = top_level.descendant(['docs', 'redirects.yaml']) routing_config = yaml.safe_load(redirects_path.getContent()) try: - sync_perform( + sync_perform( dispatcher=ComposedDispatcher([boto_dispatcher, base_dispatcher]), effect=publish_docs( flocker_version=options['flocker-version'], @@ -408,9 +408,9 @@ class UploadOptions(Options): def parseArgs(self): version = self['flocker-version'] - if not (is_release(version) - or is_weekly_release(version) - or is_pre_release(version)): + if not (is_release(version) or + is_weekly_release(version) or + is_pre_release(version)): raise NotARelease() if get_doc_version(version) != version: @@ -654,7 +654,7 @@ def upload_python_packages(scratch_directory, target_bucket, top_level, 'bdist_wheel', '--dist-dir={}'.format(scratch_directory.path)], cwd=top_level.path, stdout=output, stderr=error) - files = set([file.basename() for file in scratch_directory.children()]) + files = set([f.basename() for f in scratch_directory.children()]) yield Effect(UploadToS3Recursively( source_path=scratch_directory, target_bucket=target_bucket, @@ -743,9 +743,9 @@ def calculate_base_branch(version, path): :param bytes path: See :func:`git.Repo.init`. :returns: The base branch from which the new release branch was created. """ - if not (is_release(version) - or is_weekly_release(version) - or is_pre_release(version)): + if not (is_release(version) or + is_weekly_release(version) or + is_pre_release(version)): raise NotARelease() repo = Repo(path=path, search_parent_directories=True) @@ -963,7 +963,7 @@ class TestRedirectsOptions(Options): ["doc-version", None, flocker.__version__, "The version which the documentation sites are expected to redirect " "to.\n" - ], + ], ] optFlags = [ @@ -1008,6 +1008,7 @@ def get_expected_redirects(flocker_version): return expected_redirects + def test_redirects_main(args, base_path, top_level): """ Tests redirects to Flocker documentation. @@ -1050,7 +1051,7 @@ def test_redirects_main(args, base_path, top_level): sys.stderr.write(message) if len(failed_redirects): - raise SystemExit(1) + raise SystemExit(1) else: print 'All tested redirects work correctly.' 
diff --git a/admin/test/test_acceptance.py b/admin/test/test_acceptance.py index a6f06ce644..6422bb75db 100644 --- a/admin/test/test_acceptance.py +++ b/admin/test/test_acceptance.py @@ -146,10 +146,13 @@ def test_cluster_id(self): _PID=32748 _COMM=docker _EXE=/usr/bin/docker -_CMDLINE=/usr/bin/docker daemon -H fd:// --tlsverify --tlscacert=/etc/flocker/cluster.crt --tlscert=/etc/flocker/node.crt --tlskey=/etc/flocker/node.key -H=0.0.0.0:2376 +_CMDLINE=/usr/bin/docker daemon -H fd:// --tlsverify --tlscacert=/etc/flocker/\ +c luster.crt --tlscert=/etc/flocker/node.crt --tlskey=/etc/flocker/node.key \ +-H=0.0.0.0:2376 _SYSTEMD_CGROUP=/system.slice/docker.service _SYSTEMD_UNIT=docker.service -MESSAGE=time="2015-10-02T13:33:26.192780138Z" level=info msg="GET /v1.20/containers/json" +MESSAGE=time="2015-10-02T13:33:26.192780138Z" level=info msg="GET /v1.20/\ +containers/json" """ @@ -182,13 +185,13 @@ def test_converted(self): some="json", _HOSTNAME="some-host-2", _PROCESS_NAME="flocker-dataset-agent.service", - ), + ), dict( other="values", _HOSTNAME="some-host-1", _PROCESS_NAME="flocker-container-agent.service", - ), - ], + ), + ], self._convert(JOURNAL_EXPORT), ) diff --git a/admin/test/test_cluster_setup.py b/admin/test/test_cluster_setup.py index ad286b8b88..84f73e55f7 100644 --- a/admin/test/test_cluster_setup.py +++ b/admin/test/test_cluster_setup.py @@ -1,8 +1,5 @@ -from twisted.python.usage import UsageError - from flocker.testtools import TestCase -from ..acceptance import CommonOptions from ..cluster_setup import RunOptions @@ -10,12 +7,13 @@ class RunOptionsForTest(RunOptions): """ Patch this so it's not run during the test, which - would result in quite a lot of logic related to + would result in quite a lot of logic related to connecting to a cloud provider being run. """ def postOptions(self): pass + class RunOptionsTest(TestCase): def test_purpose(self): diff --git a/admin/test/test_homebrew.py b/admin/test/test_homebrew.py index f75582141b..466cb8e603 100644 --- a/admin/test/test_homebrew.py +++ b/admin/test/test_homebrew.py @@ -234,7 +234,8 @@ def install ENV["LDFLAGS"] = "-L#{opt_prefix}/openssl/lib" ENV["CFLAGS"] = "-I#{opt_prefix}/openssl/include" - ENV.prepend_create_path "PYTHONPATH", "#{libexec}/vendor/lib/python2.7/site-packages" + ENV.prepend_create_path "PYTHONPATH", "#{libexec}/vendor/lib/python2.7/\ +site-packages" %w[six].each do |r| resource(r).stage do system "python", *Language::Python.setup_install_args(libexec/"vendor") diff --git a/admin/test/test_images.py b/admin/test/test_images.py index 02b98e0751..43777192a9 100644 --- a/admin/test/test_images.py +++ b/admin/test/test_images.py @@ -15,11 +15,6 @@ extract_from_urllib3, ) -# Don't use pyOpenSSL in urllib3 - it causes an ``OpenSSL.SSL.Error`` -# exception when we try an API call on an idled persistent connection. -# See https://github.com/boto/boto3/issues/220 -extract_from_urllib3() - from effect import Effect, sync_perform from effect.testing import perform_sequence @@ -47,6 +42,11 @@ PublishInstallerImagesOptions, PACKER_PATH ) +# Don't use pyOpenSSL in urllib3 - it causes an ``OpenSSL.SSL.Error`` +# exception when we try an API call on an idled persistent connection. 
+# See https://github.com/boto/boto3/issues/220 +extract_from_urllib3() + try: boto3.session.Session().client('s3').list_buckets() except (ClientError, NoCredentialsError, EndpointConnectionError) as e: diff --git a/admin/test/test_packaging.py b/admin/test/test_packaging.py index 697dab8d8c..4536ab185e 100644 --- a/admin/test/test_packaging.py +++ b/admin/test/test_packaging.py @@ -712,11 +712,11 @@ def test_deb(self): expected_headers = dict( Package=expected_name, Version=( - expected_epoch - + b':' - + expected_rpm_version.version - + '-' - + expected_rpm_version.release + expected_epoch + + b':' + + expected_rpm_version.version + + '-' + + expected_rpm_version.release ), License=expected_license, Vendor=expected_vendor, diff --git a/admin/test/test_release.py b/admin/test/test_release.py index 8581e4195e..a41590fa26 100644 --- a/admin/test/test_release.py +++ b/admin/test/test_release.py @@ -4,7 +4,6 @@ Tests for ``admin.release``. """ -import json import os from hashlib import sha256 @@ -59,10 +58,10 @@ def hard_linking_possible(): return False. """ scratch_directory = FilePath(tempfile.mkdtemp()) - file = scratch_directory.child('src') - file.touch() + test_file = scratch_directory.child('src') + test_file.touch() try: - os.link(file.path, scratch_directory.child('dst').path) + os.link(test_file.path, scratch_directory.child('dst').path) return True except: return False @@ -214,12 +213,18 @@ def test_copies_documentation(self): 'clusterhq-staging-docs': { 'index.html': '', 'en/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': 'sub-index-content', - 'release/flocker-0.3.0+444.gf05215b/other.html': 'other-content', - 'release/flocker-0.3.0+392.gd50b558/index.html': 'bad-index', - 'release/flocker-0.3.0+392.gd50b558/sub/index.html': 'bad-sub-index', - 'release/flocker-0.3.0+392.gd50b558/other.html': 'bad-other', + 'release/flocker-0.3.0+444.gf05215b/index.html': + 'index-content', + 'release/flocker-0.3.0+444.gf05215b/sub/index.html': + 'sub-index-content', + 'release/flocker-0.3.0+444.gf05215b/other.html': + 'other-content', + 'release/flocker-0.3.0+392.gd50b558/index.html': + 'bad-index', + 'release/flocker-0.3.0+392.gd50b558/sub/index.html': + 'bad-sub-index', + 'release/flocker-0.3.0+392.gd50b558/other.html': + 'bad-other', }, }) self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', @@ -229,12 +234,18 @@ def test_copies_documentation(self): # originals 'index.html': '', 'en/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': 'sub-index-content', - 'release/flocker-0.3.0+444.gf05215b/other.html': 'other-content', - 'release/flocker-0.3.0+392.gd50b558/index.html': 'bad-index', - 'release/flocker-0.3.0+392.gd50b558/sub/index.html': 'bad-sub-index', - 'release/flocker-0.3.0+392.gd50b558/other.html': 'bad-other', + 'release/flocker-0.3.0+444.gf05215b/index.html': + 'index-content', + 'release/flocker-0.3.0+444.gf05215b/sub/index.html': + 'sub-index-content', + 'release/flocker-0.3.0+444.gf05215b/other.html': + 'other-content', + 'release/flocker-0.3.0+392.gd50b558/index.html': + 'bad-index', + 'release/flocker-0.3.0+392.gd50b558/sub/index.html': + 'bad-sub-index', + 'release/flocker-0.3.0+392.gd50b558/other.html': + 'bad-other', # and new copies 'en/latest/index.html': 'index-content', 'en/latest/sub/index.html': 'sub-index-content', @@ -261,12 +272,18 @@ def test_copies_documentation_production(self): 
'en/latest/index.html': '', }, 'clusterhq-staging-docs': { - 'release/flocker-0.3.1/index.html': 'index-content', - 'release/flocker-0.3.1/sub/index.html': 'sub-index-content', - 'release/flocker-0.3.1/other.html': 'other-content', - 'release/flocker-0.3.0+392.gd50b558/index.html': 'bad-index', - 'release/flocker-0.3.0+392.gd50b558/sub/index.html': 'bad-sub-index', - 'release/flocker-0.3.0+392.gd50b558/other.html': 'bad-other', + 'release/flocker-0.3.1/index.html': + 'index-content', + 'release/flocker-0.3.1/sub/index.html': + 'sub-index-content', + 'release/flocker-0.3.1/other.html': + 'other-content', + 'release/flocker-0.3.0+392.gd50b558/index.html': + 'bad-index', + 'release/flocker-0.3.0+392.gd50b558/sub/index.html': + 'bad-sub-index', + 'release/flocker-0.3.0+392.gd50b558/other.html': + 'bad-other', } }) self.publish_docs(aws, '0.3.1', '0.3.1', @@ -303,8 +320,10 @@ def test_deletes_removed_documentation(self): 'en/0.3.1/index.html': 'old-index-content', 'en/0.3.1/sub/index.html': 'old-sub-index-content', 'en/0.3.1/other.html': 'other-content', - 'release/flocker-0.3.0+444.gf05215b/index.html': 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': 'sub-index-content', + 'release/flocker-0.3.0+444.gf05215b/index.html': + 'index-content', + 'release/flocker-0.3.0+444.gf05215b/sub/index.html': + 'sub-index-content', }, }) self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', @@ -318,8 +337,10 @@ def test_deletes_removed_documentation(self): 'en/0.3.1/index.html': 'index-content', 'en/0.3.1/sub/index.html': 'sub-index-content', # and the originals - 'release/flocker-0.3.0+444.gf05215b/index.html': 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': 'sub-index-content', + 'release/flocker-0.3.0+444.gf05215b/index.html': + 'index-content', + 'release/flocker-0.3.0+444.gf05215b/sub/index.html': + 'sub-index-content', }) def test_updated_routing_rules(self): @@ -1150,13 +1171,13 @@ def get_dispatcher(self): 'other.xml.gz', ]: for key in files_on_s3: - if (key.endswith(metadata_file) - and key.startswith(repodata_path)): + if (key.endswith(metadata_file) and + key.startswith(repodata_path)): expected_files.add( os.path.join( repodata_path, - sha256(files_on_s3[key]).hexdigest() - + '-' + metadata_file) + sha256(files_on_s3[key]).hexdigest() + + '-' + metadata_file) ) break else: @@ -1276,7 +1297,8 @@ def setUp(self): self.build_server = 'http://test-build-server.example' # XXX: FLOC-3540 remove skip once the support for Ubuntu 15.10 is released - @skipIf(True, "Skipping until the changes to support Ubuntu 15.10 are released - FLOC-3540") + @skipIf(True, "Skipping until the changes to support Ubuntu 15.10 " + "are released - FLOC-3540") def test_repositories_created(self): """ Calling :func:`upload_packages` creates repositories for supported @@ -1333,7 +1355,8 @@ def test_repositories_created(self): self.assertEqual(expected_files, set(files_on_s3)) # XXX: FLOC-3540 remove skip once the support for Ubuntu 15.10 is released - @skipIf(True, "Skipping until the changes to support Ubuntu 15.10 are released - FLOC-3540") + @skipIf(True, "Skipping until the changes to support Ubuntu 15.10" + " are released - FLOC-3540") def test_key_suffixes(self): """ The OS part of the keys for created repositories have suffixes (or not) @@ -1772,8 +1795,8 @@ def test_branch_only_exists_remote(self): self.assertEqual( calculate_base_branch( - version='0.3.0rc2', - path=clone.working_dir).name, + version='0.3.0rc2', + path=clone.working_dir).name, "master") diff --git 
a/admin/yum.py b/admin/yum.py index c0f99bbe3a..8931b51c5e 100644 --- a/admin/yum.py +++ b/admin/yum.py @@ -108,7 +108,7 @@ def perform_create_repository(dispatcher, intent): elif package_type == PackageTypes.DEB: packages_file = intent.repository_path.child('Packages') scan_packages(repository=intent.repository_path.path, - packages_file=packages_file.path) + packages_file=packages_file.path) intent.repository_path.child('Release').setContent( "Origin: ClusterHQ\n") @@ -154,9 +154,9 @@ def _perform_create_repository(self, dispatcher, intent): package_type = intent.distribution.package_type() packages = set([ - file for file in + f for f in intent.repository_path.listdir() - if file.endswith(package_type.value)]) + if f.endswith(package_type.value)]) if package_type == PackageTypes.RPM: metadata_directory = intent.repository_path.child('repodata') diff --git a/build.yaml b/build.yaml index 691cde754c..b41043f438 100644 --- a/build.yaml +++ b/build.yaml @@ -880,9 +880,9 @@ common_cli: # Disable exiting on error, so we can run multiple lints set +e # run flake8 lint tests on Flocker source code - flake8 --format=pylint --output flake8.lint.txt benchmark flocker + flake8 --format=pylint --output flake8.lint.txt admin benchmark flocker JOB_EXIT_STATUS="$( updateExitStatus $? )" - pylint benchmark flocker > pylint.lint.txt + pylint admin benchmark flocker > pylint.lint.txt JOB_EXIT_STATUS="$( updateExitStatus $? )" set -e diff --git a/tox.ini b/tox.ini index b3c942a46c..e83f01c923 100644 --- a/tox.ini +++ b/tox.ini @@ -19,8 +19,8 @@ changedir = {toxinidir} commands = pip install -r requirements.txt pip install .[dev] - flake8 benchmark flocker - pylint benchmark flocker + flake8 admin benchmark flocker + pylint admin benchmark flocker [testenv:sphinx] basepython = python2.7