From 098c4b29eaf9cfc3798d91df88e6087cc0637487 Mon Sep 17 00:00:00 2001
From: Pierre Sassoulas
Date: Thu, 10 Feb 2022 20:14:45 +0100
Subject: [PATCH 1/2] Rebased semi-manual changes on #5787

---
 doc/exts/pylint_extensions.py | 4 +-
 doc/exts/pylint_features.py | 4 +-
 doc/user_guide/message-control.rst | 16 +++---
 examples/deprecation_checker.py | 9 +--
 .../classes/special_methods_checker.py | 4 +-
 .../implicit_booleaness_checker.py | 5 +-
 .../refactoring/recommendation_checker.py | 6 +-
 .../refactoring/refactoring_checker.py | 56 +++++++++----------
 pylint/checkers/stdlib.py | 1 +
 pylint/checkers/strings.py | 13 +++--
 pylint/checkers/typecheck.py | 4 +-
 pylint/checkers/unsupported_version.py | 12 ++--
 pylint/config/config_initialization.py | 4 +-
 pylint/config/configuration_mixin.py | 4 +-
 pylint/config/find_default_config_files.py | 4 +-
 pylint/epylint.py | 4 +-
 pylint/extensions/_check_docs_utils.py | 4 +-
 pylint/extensions/broad_try_clause.py | 6 +-
 pylint/extensions/code_style.py | 5 +-
 pylint/extensions/comparetozero.py | 5 +-
 pylint/extensions/comparison_placement.py | 4 +-
 pylint/extensions/confusing_elif.py | 4 +-
 pylint/extensions/docparams.py | 20 +++----
 pylint/extensions/emptystring.py | 7 ++-
 pylint/extensions/for_any_all.py | 5 +-
 pylint/extensions/typing.py | 7 ++-
 pylint/lint/parallel.py | 12 ++--
 pylint/message/message_definition_store.py | 10 ++--
 pylint/message/message_id_store.py | 12 ++--
 pylint/pyreverse/dot_printer.py | 5 +-
 pylint/pyreverse/mermaidjs_printer.py | 5 +-
 pylint/pyreverse/plantuml_printer.py | 5 +-
 pylint/reporters/reports_handler_mix_in.py | 4 +-
 pylint/testutils/decorator.py | 4 +-
 .../functional/lint_module_output_update.py | 4 +-
 pylint/testutils/lint_module_test.py | 4 +-
 pylint/testutils/output_line.py | 17 +++---
 pylint/testutils/primer.py | 9 ++-
 pylint/testutils/pyreverse.py | 1 +
 pylint/utils/file_state.py | 8 +--
 pylint/utils/linterstats.py | 8 ++-
 script/bump_changelog.py | 4 +-
 script/fix_documentation.py | 5 +-
 tests/checkers/unittest_design.py | 5 +-
 tests/checkers/unittest_format.py | 11 ++--
 tests/checkers/unittest_stdlib.py | 7 ++-
 tests/checkers/unittest_variables.py | 7 +--
 .../config/test_functional_config_loading.py | 4 +-
 tests/config/unittest_config.py | 5 +-
 tests/extensions/test_check_docs_utils.py | 4 +-
 tests/primer/test_primer_external.py | 7 +--
 tests/primer/test_primer_stdlib.py | 4 +-
 tests/pyreverse/test_main.py | 1 +
 tests/pyreverse/test_utils.py | 16 +++---
 tests/test_import_graph.py | 4 +-
 tests/testutils/data/init_hook.py | 4 +-
 tests/testutils/test_output_line.py | 6 +-
 tests/unittest_reporting.py | 5 +-
 58 files changed, 234 insertions(+), 190 deletions(-)

diff --git a/doc/exts/pylint_extensions.py b/doc/exts/pylint_extensions.py
index d8f8dfeca5..8d506627b3 100755
--- a/doc/exts/pylint_extensions.py
+++ b/doc/exts/pylint_extensions.py
@@ -2,7 +2,9 @@
 # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
 # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE

-"""Script used to generate the extensions file before building the actual documentation."""
+"""Script used to generate the extensions file before building the actual
+documentation.
+""" import os import re diff --git a/doc/exts/pylint_features.py b/doc/exts/pylint_features.py index a867dd05fe..d7bb6c8417 100755 --- a/doc/exts/pylint_features.py +++ b/doc/exts/pylint_features.py @@ -2,7 +2,9 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Script used to generate the features file before building the actual documentation.""" +"""Script used to generate the features file before building the actual +documentation. +""" import os diff --git a/doc/user_guide/message-control.rst b/doc/user_guide/message-control.rst index 0f735e533a..f1ab58c060 100644 --- a/doc/user_guide/message-control.rst +++ b/doc/user_guide/message-control.rst @@ -112,7 +112,7 @@ Here's an example with all these rules in a single place: pass def meth1(self, arg): - """this issues a message""" + """This issues a message""" print(self) def meth2(self, arg): @@ -122,14 +122,14 @@ Here's an example with all these rules in a single place: + "foo") def meth3(self): - """test one line disabling""" + """Test one line disabling""" # no error print(self.bla) # pylint: disable=no-member # error print(self.blop) def meth4(self): - """test re-enabling""" + """Test re-enabling""" # pylint: disable=no-member # no error print(self.bla) @@ -139,7 +139,7 @@ Here's an example with all these rules in a single place: print(self.blip) def meth5(self): - """test IF sub-block re-enabling""" + """Test IF sub-block re-enabling""" # pylint: disable=no-member # no error print(self.bla) @@ -154,7 +154,7 @@ Here's an example with all these rules in a single place: print(self.blip) def meth6(self): - """test TRY/EXCEPT sub-block re-enabling""" + """Test TRY/EXCEPT sub-block re-enabling""" # pylint: disable=no-member # no error print(self.bla) @@ -169,7 +169,7 @@ Here's an example with all these rules in a single place: print(self.blip) def meth7(self): - """test one line block opening disabling""" + """Test one line block opening disabling""" if self.blop: # pylint: disable=no-member # error print(self.blip) @@ -180,7 +180,7 @@ Here's an example with all these rules in a single place: print(self.blip) def meth8(self): - """test late disabling""" + """Test late disabling""" # error print(self.blip) # pylint: disable=no-member @@ -189,7 +189,7 @@ Here's an example with all these rules in a single place: print(self.blop) def meth9(self): - """test next line disabling""" + """Test next line disabling""" # no error # pylint: disable-next=no-member print(self.bla) diff --git a/examples/deprecation_checker.py b/examples/deprecation_checker.py index 79a7285379..d336417f47 100644 --- a/examples/deprecation_checker.py +++ b/examples/deprecation_checker.py @@ -1,6 +1,6 @@ -"""Example checker detecting deprecated functions/methods. Following example searches for usages of -deprecated function `deprecated_function` and deprecated method `MyClass.deprecated_method` -from module mymodule: +"""Example checker detecting deprecated functions/methods. Following example searches +for usages of deprecated function `deprecated_function` and deprecated method +`MyClass.deprecated_method` from module mymodule: .. code-block:: console $ cat mymodule.py @@ -59,7 +59,8 @@ class DeprecationChecker(DeprecatedMixin, BaseChecker): name = "deprecated" def deprecated_methods(self) -> Set[str]: - """Callback method called by DeprecatedMixin for every method/function found in the code. 
+ """Callback method called by DeprecatedMixin for every method/function found in + the code. Returns: collections.abc.Container of deprecated function/method names. diff --git a/pylint/checkers/classes/special_methods_checker.py b/pylint/checkers/classes/special_methods_checker.py index 7eb61440df..7005abd8d4 100644 --- a/pylint/checkers/classes/special_methods_checker.py +++ b/pylint/checkers/classes/special_methods_checker.py @@ -23,8 +23,8 @@ def _safe_infer_call_result(node, caller, context=None): """Safely infer the return value of a function. - Returns None if inference failed or if there is some ambiguity (more than - one node has been inferred). Otherwise, returns inferred value. + Returns None if inference failed or if there is some ambiguity (more than one node + has been inferred). Otherwise, returns inferred value. """ try: inferit = node.infer_call_result(caller, context=context) diff --git a/pylint/checkers/refactoring/implicit_booleaness_checker.py b/pylint/checkers/refactoring/implicit_booleaness_checker.py index b1fa672788..7d9bcb674a 100644 --- a/pylint/checkers/refactoring/implicit_booleaness_checker.py +++ b/pylint/checkers/refactoring/implicit_booleaness_checker.py @@ -127,9 +127,8 @@ def instance_has_bool(class_def: nodes.ClassDef) -> bool: @utils.check_messages("use-implicit-booleaness-not-len") def visit_unaryop(self, node: nodes.UnaryOp) -> None: - """`not len(S)` must become `not S` regardless if the parent block - is a test condition or something else (boolean expression) - e.g. `if not len(S):` + """`not len(S)` must become `not S` regardless if the parent block is a test + condition or something else (boolean expression) e.g. `if not len(S):` """ if ( isinstance(node, nodes.UnaryOp) diff --git a/pylint/checkers/refactoring/recommendation_checker.py b/pylint/checkers/refactoring/recommendation_checker.py index d517cfd1b5..e4d6bc225d 100644 --- a/pylint/checkers/refactoring/recommendation_checker.py +++ b/pylint/checkers/refactoring/recommendation_checker.py @@ -102,7 +102,9 @@ def _check_consider_iterating_dictionary(self, node: nodes.Call) -> None: self.add_message("consider-iterating-dictionary", node=node) def _check_use_maxsplit_arg(self, node: nodes.Call) -> None: - """Add message when accessing first or last elements of a str.split() or str.rsplit().""" + """Add message when accessing first or last elements of a str.split() or + str.rsplit(). + """ # Check if call is split() or rsplit() if not ( @@ -337,7 +339,7 @@ def visit_const(self, node: nodes.Const) -> None: def _detect_replacable_format_call(self, node: nodes.Const) -> None: """Check whether a string is used in a call to format() or '%' and whether it - can be replaced by an f-string + can be replaced by an f-string. """ if ( isinstance(node.parent, nodes.Attribute) diff --git a/pylint/checkers/refactoring/refactoring_checker.py b/pylint/checkers/refactoring/refactoring_checker.py index 1bf75e77e3..198060f9de 100644 --- a/pylint/checkers/refactoring/refactoring_checker.py +++ b/pylint/checkers/refactoring/refactoring_checker.py @@ -118,8 +118,8 @@ def _is_a_return_statement(node: nodes.Call) -> bool: def _is_part_of_with_items(node: nodes.Call) -> bool: - """Checks if one of the node's parents is a ``nodes.With`` node and that the node itself is located - somewhere under its ``items``. + """Checks if one of the node's parents is a ``nodes.With`` node and that the node + itself is located somewhere under its ``items``. 
""" frame = node.frame(future=True) current = node @@ -133,8 +133,8 @@ def _is_part_of_with_items(node: nodes.Call) -> bool: def _will_be_released_automatically(node: nodes.Call) -> bool: - """Checks if a call that could be used in a ``with`` statement is used in an alternative - construct which would ensure that its __exit__ method is called. + """Checks if a call that could be used in a ``with`` statement is used in an + alternative construct which would ensure that its __exit__ method is called. """ callables_taking_care_of_exit = frozenset( ( @@ -151,8 +151,8 @@ def _will_be_released_automatically(node: nodes.Call) -> bool: class ConsiderUsingWithStack(NamedTuple): - """Stack for objects that may potentially trigger a R1732 message - if they are not used in a ``with`` block later on. + """Stack for objects that may potentially trigger a R1732 message if they are not + used in a ``with`` block later on. """ module_scope: Dict[str, nodes.NodeNG] = {} @@ -181,9 +181,9 @@ def clear_all(self) -> None: class RefactoringChecker(checkers.BaseTokenChecker): """Looks for code which can be refactored. - This checker also mixes the astroid and the token approaches - in order to create knowledge about whether an "else if" node - is a true "else if" node, or an "elif" node. + This checker also mixes the astroid and the token approaches in order to create + knowledge about whether an "else if" node is a true "else if" node, or an "elif" + node. """ __implements__ = (interfaces.ITokenChecker, interfaces.IAstroidChecker) @@ -488,10 +488,9 @@ def _is_bool_const(node): def _is_actual_elif(self, node): """Check if the given node is an actual elif. - This is a problem we're having with the builtin ast module, - which splits `elif` branches into a separate if statement. - Unfortunately we need to know the exact type in certain - cases. + This is a problem we're having with the builtin ast module, which splits `elif` + branches into a separate if statement. Unfortunately we need to know the exact + type in certain cases. """ if isinstance(node.parent, nodes.If): orelse = node.parent.orelse @@ -504,11 +503,10 @@ def _is_actual_elif(self, node): def _check_simplifiable_if(self, node): """Check if the given if node can be simplified. - The if statement can be reduced to a boolean expression - in some cases. For instance, if there are two branches - and both of them return a boolean value that depends on - the result of the statement's test, then this can be reduced - to `bool(test)` without losing any functionality. + The if statement can be reduced to a boolean expression in some cases. For + instance, if there are two branches and both of them return a boolean value that + depends on the result of the statement's test, then this can be reduced to + `bool(test)` without losing any functionality. """ if self._is_actual_elif(node): @@ -1301,8 +1299,8 @@ def _apply_boolean_simplification_rules(operator, values): def _simplify_boolean_operation(self, bool_op): """Attempts to simplify a boolean operation. - Recursively applies simplification on the operator terms, - and keeps track of whether reductions have been made. + Recursively applies simplification on the operator terms, and keeps track of + whether reductions have been made. """ children = list(bool_op.get_children()) intermediate = [ @@ -1323,8 +1321,8 @@ def _simplify_boolean_operation(self, bool_op): def _check_simplifiable_condition(self, node): """Check if a boolean condition can be simplified. 
- Variables will not be simplified, even in the value can be inferred, - and expressions like '3 + 4' will remain expanded. + Variables will not be simplified, even in the value can be inferred, and + expressions like '3 + 4' will remain expanded. """ if not utils.is_test_condition(node): return @@ -1503,7 +1501,7 @@ def _check_consider_using_with(self, node: nodes.Call): self.add_message("consider-using-with", node=node) def _check_use_list_or_dict_literal(self, node: nodes.Call) -> None: - """Check if empty list or dict is created by using the literal [] or {}.""" + """Check if empty list or dict is created by using '[]' or '{}'.""" if node.as_string() in {"list()", "dict()"}: inferred = utils.safe_infer(node.func) if isinstance(inferred, nodes.ClassDef) and not node.args: @@ -1514,6 +1512,7 @@ def _check_use_list_or_dict_literal(self, node: nodes.Call) -> None: def _check_consider_using_join(self, aug_assign): """We start with the augmented assignment and work our way upwards. + Names of variables for nodes if match successful: result = '' # assign for number in ['1', '2', '3'] # for_loop @@ -1638,7 +1637,8 @@ def _check_unnecessary_comprehension(self, node: nodes.Comprehension) -> None: def _is_and_or_ternary(node): """Returns true if node is 'condition and true_value or false_value' form. - All of: condition, true_value and false_value should not be a complex boolean expression + All of: condition, true_value and false_value should not be a complex boolean + expression """ return ( isinstance(node, nodes.BoolOp) @@ -1671,7 +1671,6 @@ def _check_consistent_returns(self, node: nodes.FunctionDef) -> None: Args: node (nodes.FunctionDef): the function holding the return statements. - """ # explicit return statements are those with a not None value explicit_returns = [ @@ -1756,7 +1755,6 @@ def _is_node_return_ended(self, node: nodes.NodeNG) -> bool: Returns: bool: True if the node ends with an explicit statement, False otherwise. - """ # Recursion base case if isinstance(node, nodes.Return): @@ -1828,9 +1826,9 @@ def _is_function_def_never_returning(self, node: nodes.FunctionDef) -> bool: return False def _check_return_at_the_end(self, node): - """Check for presence of a *single* return statement at the end of a - function. "return" or "return None" are useless because None is the - default return type if they are missing. + """Check for presence of a *single* return statement at the end of a function. + "return" or "return None" are useless because None is the default return type if + they are missing. NOTE: produces a message only if there is a single return statement in the function body. Otherwise _check_consistent_returns() is called! diff --git a/pylint/checkers/stdlib.py b/pylint/checkers/stdlib.py index fe6113953e..89abeddc9e 100644 --- a/pylint/checkers/stdlib.py +++ b/pylint/checkers/stdlib.py @@ -637,6 +637,7 @@ def _check_redundant_assert(self, node, infer): def _check_datetime(self, node): """Check that a datetime was inferred. + If so, emit boolean-datetime warning. """ try: diff --git a/pylint/checkers/strings.py b/pylint/checkers/strings.py index e89e8db5f3..fd44289e04 100644 --- a/pylint/checkers/strings.py +++ b/pylint/checkers/strings.py @@ -226,8 +226,8 @@ def get_access_path(key, parts): - """Given a list of format specifiers, returns - the final access path (e.g. a.b.c[0][1]). + """Given a list of format specifiers, returns the final access path (e.g. + a.b.c[0][1]). 
""" path = [] for is_attribute, specifier in parts: @@ -256,8 +256,8 @@ def arg_matches_format_type(arg_type, format_type): class StringFormatChecker(BaseChecker): - """Checks string formatting operations to ensure that the format string - is valid and the arguments match the format string. + """Checks string formatting operations to ensure that the format string is valid and + the arguments match the format string. """ __implements__ = (IAstroidChecker,) @@ -539,8 +539,8 @@ def _check_new_format(self, node, func): self._check_new_format_specifiers(node, fields, named_arguments) def _check_new_format_specifiers(self, node, fields, named): - """Check attribute and index access in the format - string ("{0.a}" and "{0[a]}"). + """Check attribute and index access in the format string ("{0.a}" and + "{0[a]}"). """ for key, specifiers in fields: # Obtain the argument. If it can't be obtained @@ -933,6 +933,7 @@ def register(linter: "PyLinter") -> None: def str_eval(token): """Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit. + This supports f-strings, contrary to `ast.literal_eval`. We have to support all string literal notations: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals diff --git a/pylint/checkers/typecheck.py b/pylint/checkers/typecheck.py index b28815e704..7b2cab10d5 100644 --- a/pylint/checkers/typecheck.py +++ b/pylint/checkers/typecheck.py @@ -1142,7 +1142,9 @@ def visit_assign(self, node: nodes.Assign) -> None: self._check_dundername_is_string(node) def _check_assignment_from_function_call(self, node: nodes.Assign) -> None: - """When assigning to a function call, check that the function returns a valid value.""" + """Check that if assigning to a function call, the function is + possibly returning something valuable + """ if not isinstance(node.value, nodes.Call): return diff --git a/pylint/checkers/unsupported_version.py b/pylint/checkers/unsupported_version.py index b3369b877f..ec1234091f 100644 --- a/pylint/checkers/unsupported_version.py +++ b/pylint/checkers/unsupported_version.py @@ -5,8 +5,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Checker for features used that are not supported by all python versions -indicated by the py-version setting. +"""Checker for features used that are not supported by all python versions indicated by +the py-version setting. """ @@ -28,8 +28,8 @@ class UnsupportedVersionChecker(BaseChecker): - """Checker for features that are not supported by all python versions - indicated by the py-version setting. + """Checker for features that are not supported by all python versions indicated by + the py-version setting. """ __implements__ = (IAstroidChecker,) @@ -67,8 +67,8 @@ def visit_decorators(self, node: nodes.Decorators) -> None: self._check_typing_final(node) def _check_typing_final(self, node: nodes.Decorators) -> None: - """Add a message when the `typing.final` decorator is used and the - py-version is lower than 3.8 + """Add a message when the `typing.final` decorator is used and the py- version + is lower than 3.8. 
""" if self._py38_plus: return diff --git a/pylint/config/config_initialization.py b/pylint/config/config_initialization.py index d038220302..a006901a8e 100644 --- a/pylint/config/config_initialization.py +++ b/pylint/config/config_initialization.py @@ -19,8 +19,8 @@ def _config_initialization( config_file: Union[None, str, Path] = None, verbose_mode: Optional[bool] = None, ) -> List[str]: - """Parse all available options, read config files and command line arguments and - set options accordingly. + """Parse all available options, read config files and command line arguments and set + options accordingly. """ # Read the config file. The parser is stored on linter.cfgfile_parser diff --git a/pylint/config/configuration_mixin.py b/pylint/config/configuration_mixin.py index a2abcb7528..c5f29781d9 100644 --- a/pylint/config/configuration_mixin.py +++ b/pylint/config/configuration_mixin.py @@ -6,8 +6,8 @@ class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn): - """Basic mixin for simple configurations which don't need the - manager / providers model + """Basic mixin for simple configurations which don't need the manager / providers + model. """ def __init__(self, *args, **kwargs): diff --git a/pylint/config/find_default_config_files.py b/pylint/config/find_default_config_files.py index 10c31345de..8d1fc1885d 100644 --- a/pylint/config/find_default_config_files.py +++ b/pylint/config/find_default_config_files.py @@ -71,7 +71,9 @@ def find_default_config_files() -> Iterator[str]: def find_pylintrc() -> Optional[str]: - """Search the pylint rc file and return its path if it finds it, else return None.""" + """Search the pylint rc file and return its path if it finds it, else return + None. + """ for config_file in find_default_config_files(): if config_file.endswith("pylintrc"): return config_file diff --git a/pylint/epylint.py b/pylint/epylint.py index 223f723a77..bbe9e096ae 100755 --- a/pylint/epylint.py +++ b/pylint/epylint.py @@ -68,9 +68,7 @@ def _get_env(): - """Extracts the environment PYTHONPATH and appends the current 'sys.path' - to it. - """ + """Extracts the environment PYTHONPATH and appends the current 'sys.path' to it.""" env = dict(os.environ) env["PYTHONPATH"] = os.pathsep.join(sys.path) return env diff --git a/pylint/extensions/_check_docs_utils.py b/pylint/extensions/_check_docs_utils.py index 70b312539e..4c3713c4ba 100644 --- a/pylint/extensions/_check_docs_utils.py +++ b/pylint/extensions/_check_docs_utils.py @@ -401,8 +401,8 @@ def match_param_docs(self): class EpytextDocstring(SphinxDocstring): """Epytext is similar to Sphinx. See the docs: - http://epydoc.sourceforge.net/epytext.html - http://epydoc.sourceforge.net/fields.html#fields + http://epydoc.sourceforge.net/epytext.html + http://epydoc.sourceforge.net/fields.html#fields. It's used in PyCharm: https://www.jetbrains.com/help/pycharm/2016.1/creating-documentation-comments.html#d848203e314 diff --git a/pylint/extensions/broad_try_clause.py b/pylint/extensions/broad_try_clause.py index a45dc5fe6b..118a027de9 100644 --- a/pylint/extensions/broad_try_clause.py +++ b/pylint/extensions/broad_try_clause.py @@ -23,10 +23,8 @@ class BroadTryClauseChecker(checkers.BaseChecker): """Checks for try clauses with too many lines. - According to PEP 8, ``try`` clauses shall contain the absolute minimum - amount of code. This checker enforces a maximum number of statements within - ``try`` clauses. - + According to PEP 8, ``try`` clauses shall contain the absolute minimum amount of + code. 
This checker enforces a maximum number of statements within ``try`` clauses. """ __implements__ = interfaces.IAstroidChecker diff --git a/pylint/extensions/code_style.py b/pylint/extensions/code_style.py index 7082c991ce..9f9d801c93 100644 --- a/pylint/extensions/code_style.py +++ b/pylint/extensions/code_style.py @@ -252,6 +252,7 @@ def _check_prev_sibling_to_if_stmt( prev_sibling: Optional[nodes.NodeNG], name: Optional[str] ) -> TypeGuard[Union[nodes.Assign, nodes.AnnAssign]]: """Check if previous sibling is an assignment with the same name. + Ignore statements which span multiple lines. """ if prev_sibling is None or prev_sibling.tolineno - prev_sibling.fromlineno != 0: @@ -278,8 +279,8 @@ def _check_ignore_assignment_expr_suggestion( ) -> bool: """Return True if suggestion for assignment expr should be ignored. - E.g., in cases where a match statement would be a better fit - (multiple conditions). + E.g., in cases where a match statement would be a better fit (multiple + conditions). """ if isinstance(node.test, nodes.Compare): next_if_node: Optional[nodes.If] = None diff --git a/pylint/extensions/comparetozero.py b/pylint/extensions/comparetozero.py index 592e15a5b9..0a81039cbc 100644 --- a/pylint/extensions/comparetozero.py +++ b/pylint/extensions/comparetozero.py @@ -31,9 +31,10 @@ def _is_constant_zero(node): class CompareToZeroChecker(checkers.BaseChecker): """Checks for comparisons to zero. + Most of the time you should use the fact that integers with a value of 0 are false. - An exception to this rule is when 0 is allowed in the program and has a - different meaning than None! + An exception to this rule is when 0 is allowed in the program and has a different + meaning than None! """ __implements__ = (interfaces.IAstroidChecker,) diff --git a/pylint/extensions/comparison_placement.py b/pylint/extensions/comparison_placement.py index f34f1eb821..6ca5db5e77 100644 --- a/pylint/extensions/comparison_placement.py +++ b/pylint/extensions/comparison_placement.py @@ -1,8 +1,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Checks for yoda comparisons (variable before constant) -See https://en.wikipedia.org/wiki/Yoda_conditions +"""Checks for yoda comparisons (variable before constant) See +https://en.wikipedia.org/wiki/Yoda_conditions. """ diff --git a/pylint/extensions/confusing_elif.py b/pylint/extensions/confusing_elif.py index 99588d0b8d..f490a54f19 100644 --- a/pylint/extensions/confusing_elif.py +++ b/pylint/extensions/confusing_elif.py @@ -20,7 +20,9 @@ class ConfusingConsecutiveElifChecker(BaseChecker): - """Checks if "elif" is used right after an indented block that finishes with "if" or "elif" itself.""" + """Checks if "elif" is used right after an indented block that finishes with "if" or + "elif" itself. + """ __implements__ = IAstroidChecker diff --git a/pylint/extensions/docparams.py b/pylint/extensions/docparams.py index 35324ac041..ad891f45dd 100644 --- a/pylint/extensions/docparams.py +++ b/pylint/extensions/docparams.py @@ -387,8 +387,8 @@ def _compare_missing_args( expected_argument_names, warning_node, ): - """Compare the found argument names with the expected ones and - generate a message if there are arguments missing. + """Compare the found argument names with the expected ones and generate a + message if there are arguments missing. 
:param found_argument_names: argument names found in the docstring :type found_argument_names: set @@ -423,8 +423,8 @@ def _compare_different_args( expected_argument_names, warning_node, ): - """Compare the found argument names with the expected ones and - generate a message if there are extra arguments found. + """Compare the found argument names with the expected ones and generate a + message if there are extra arguments found. :param found_argument_names: argument names found in the docstring :type found_argument_names: set @@ -461,8 +461,8 @@ def _compare_ignored_args( ignored_argument_names, warning_node, ): - """Compare the found argument names with the ignored ones and - generate a message if there are ignored arguments found. + """Compare the found argument names with the ignored ones and generate a message + if there are ignored arguments found. :param found_argument_names: argument names found in the docstring :type found_argument_names: set @@ -492,10 +492,10 @@ def check_arguments_in_docstring( warning_node: astroid.NodeNG, accept_no_param_doc: Optional[bool] = None, ): - """Check that all parameters in a function, method or class constructor - on the one hand and the parameters mentioned in the parameter - documentation (e.g. the Sphinx tags 'param' and 'type') on the other - hand are consistent with each other. + """Check that all parameters in a function, method or class constructor on the + one hand and the parameters mentioned in the parameter documentation (e.g. the + Sphinx tags 'param' and 'type') on the other hand are consistent with each + other. * Undocumented parameters except 'self' are noticed. * Undocumented parameter types except for 'self' and the ``*`` diff --git a/pylint/extensions/emptystring.py b/pylint/extensions/emptystring.py index 15bdd1e584..9a878bc05b 100644 --- a/pylint/extensions/emptystring.py +++ b/pylint/extensions/emptystring.py @@ -26,9 +26,10 @@ class CompareToEmptyStringChecker(checkers.BaseChecker): """Checks for comparisons to empty string. - Most of the time you should use the fact that empty strings are false. - An exception to this rule is when an empty string value is allowed in the program - and has a different meaning than None! + + Most of the time you should use the fact that empty strings are false. An exception + to this rule is when an empty string value is allowed in the program and has a + different meaning than None! """ __implements__ = (interfaces.IAstroidChecker,) diff --git a/pylint/extensions/for_any_all.py b/pylint/extensions/for_any_all.py index 915fae8a33..690fc7bbf9 100644 --- a/pylint/extensions/for_any_all.py +++ b/pylint/extensions/for_any_all.py @@ -47,8 +47,9 @@ def visit_for(self, node: nodes.For) -> None: @staticmethod def _build_suggested_string(node: nodes.For, final_return_bool: bool) -> str: - """When a nodes.For node can be rewritten as an any/all statement, return a suggestion for that statement - final_return_bool is the boolean literal returned after the for loop if all conditions fail + """When a nodes.For node can be rewritten as an any/all statement, return a + suggestion for that statement final_return_bool is the boolean literal returned + after the for loop if all conditions fail. 
""" loop_var = node.target.as_string() loop_iter = node.iter.as_string() diff --git a/pylint/extensions/typing.py b/pylint/extensions/typing.py index cc68bc35ef..fb5aa2b560 100644 --- a/pylint/extensions/typing.py +++ b/pylint/extensions/typing.py @@ -257,9 +257,10 @@ def _check_for_typing_alias( @check_messages("consider-using-alias") def leave_module(self, node: nodes.Module) -> None: - """After parsing of module is complete, add messages for - 'consider-using-alias' check. Make sure results are safe - to recommend / collision free. + """After parsing of module is complete, add messages for 'consider- using-alias' + check. + + Make sure results are safe to recommend / collision free. """ if self._py37_plus and not self._py39_plus: msg_future_import = self._msg_postponed_eval_hint(node) diff --git a/pylint/lint/parallel.py b/pylint/lint/parallel.py index f3e2d6cb45..49d356e6fb 100644 --- a/pylint/lint/parallel.py +++ b/pylint/lint/parallel.py @@ -51,7 +51,8 @@ def _get_new_args(message): def _worker_initialize( linter: bytes, arguments: Union[None, str, Sequence[str]] = None ) -> None: - """Function called to initialize a worker for a Process within a multiprocessing Pool. + """Function called to initialize a worker for a Process within a multiprocessing + Pool. :param linter: A linter-class (PyLinter) instance pickled with dill :param arguments: File or module name(s) to lint and to be added to sys.path @@ -133,10 +134,11 @@ def check_parallel( files: Iterable[FileItem], arguments: Union[None, str, Sequence[str]] = None, ) -> None: - """Use the given linter to lint the files with given amount of workers (jobs) - This splits the work filestream-by-filestream. If you need to do work across - multiple files, as in the similarity-checker, then inherit from MapReduceMixin and - implement the map/reduce mixin functionality. + """Use the given linter to lint the files with given amount of workers (jobs) This + splits the work filestream-by-filestream. + + If you need to do work across multiple files, as in the similarity- checker, then + inherit from MapReduceMixin and implement the map/reduce mixin functionality. """ # The linter is inherited by all the pool's workers, i.e. the linter # is identical to the linter object here. This is required so that diff --git a/pylint/message/message_definition_store.py b/pylint/message/message_definition_store.py index c8b4e166a0..ec41cfc0cd 100644 --- a/pylint/message/message_definition_store.py +++ b/pylint/message/message_definition_store.py @@ -15,8 +15,8 @@ class MessageDefinitionStore: - """The messages store knows information about every possible message definition but has - no particular state during analysis. + """The messages store knows information about every possible message definition but + has no particular state during analysis. """ def __init__(self) -> None: @@ -55,9 +55,9 @@ def register_message(self, message: MessageDefinition) -> None: def get_message_definitions(self, msgid_or_symbol: str) -> List[MessageDefinition]: """Returns the Message definition for either a numeric or symbolic id. - The cache has no limit as its size will likely stay minimal. For each message we store - about 1000 characters, so even if we would have 1000 messages the cache would only - take up ~= 1 Mb. + The cache has no limit as its size will likely stay minimal. For each message we + store about 1000 characters, so even if we would have 1000 messages the cache + would only take up ~= 1 Mb. 
""" return [ self._messages_definitions[m] diff --git a/pylint/message/message_id_store.py b/pylint/message/message_id_store.py index e585be5a06..e6c025f449 100644 --- a/pylint/message/message_id_store.py +++ b/pylint/message/message_id_store.py @@ -7,7 +7,9 @@ class MessageIdStore: - """The MessageIdStore store MessageId and make sure that there is a 1-1 relation between msgid and symbol.""" + """The MessageIdStore store MessageId and make sure that there is a 1-1 relation + between msgid and symbol. + """ def __init__(self) -> None: self.__msgid_to_symbol: Dict[str, str] = {} @@ -51,8 +53,8 @@ def register_message_definition( def add_msgid_and_symbol(self, msgid: str, symbol: str) -> None: """Add valid message id. - There is a little duplication with add_legacy_msgid_and_symbol to avoid a function call, - this is called a lot at initialization. + There is a little duplication with add_legacy_msgid_and_symbol to avoid a + function call, this is called a lot at initialization. """ self.__msgid_to_symbol[msgid] = symbol self.__symbol_to_msgid[symbol] = msgid @@ -62,8 +64,8 @@ def add_legacy_msgid_and_symbol( ) -> None: """Add valid legacy message id. - There is a little duplication with add_msgid_and_symbol to avoid a function call, - this is called a lot at initialization. + There is a little duplication with add_msgid_and_symbol to avoid a function + call, this is called a lot at initialization. """ self.__msgid_to_symbol[msgid] = symbol self.__symbol_to_msgid[symbol] = msgid diff --git a/pylint/pyreverse/dot_printer.py b/pylint/pyreverse/dot_printer.py index ee19a5b659..fccdcf5579 100644 --- a/pylint/pyreverse/dot_printer.py +++ b/pylint/pyreverse/dot_printer.py @@ -67,7 +67,10 @@ def emit_node( type_: NodeType, properties: Optional[NodeProperties] = None, ) -> None: - """Create a new node. Nodes can be classes, packages, participants etc.""" + """Create a new node. + + Nodes can be classes, packages, participants etc. + """ if properties is None: properties = NodeProperties(label=name) shape = SHAPES[type_] diff --git a/pylint/pyreverse/mermaidjs_printer.py b/pylint/pyreverse/mermaidjs_printer.py index 0140419931..a725d03f4c 100644 --- a/pylint/pyreverse/mermaidjs_printer.py +++ b/pylint/pyreverse/mermaidjs_printer.py @@ -38,7 +38,10 @@ def emit_node( type_: NodeType, properties: Optional[NodeProperties] = None, ) -> None: - """Create a new node. Nodes can be classes, packages, participants etc.""" + """Create a new node. + + Nodes can be classes, packages, participants etc. + """ if properties is None: properties = NodeProperties(label=name) stereotype = "~~Interface~~" if type_ is NodeType.INTERFACE else "" diff --git a/pylint/pyreverse/plantuml_printer.py b/pylint/pyreverse/plantuml_printer.py index 5693e626ff..2e643fe1fe 100644 --- a/pylint/pyreverse/plantuml_printer.py +++ b/pylint/pyreverse/plantuml_printer.py @@ -48,7 +48,10 @@ def emit_node( type_: NodeType, properties: Optional[NodeProperties] = None, ) -> None: - """Create a new node. Nodes can be classes, packages, participants etc.""" + """Create a new node. + + Nodes can be classes, packages, participants etc. 
+ """ if properties is None: properties = NodeProperties(label=name) stereotype = " << interface >>" if type_ is NodeType.INTERFACE else "" diff --git a/pylint/reporters/reports_handler_mix_in.py b/pylint/reporters/reports_handler_mix_in.py index 01d9947fd7..8f65397b5b 100644 --- a/pylint/reporters/reports_handler_mix_in.py +++ b/pylint/reporters/reports_handler_mix_in.py @@ -25,8 +25,8 @@ class ReportsHandlerMixIn: - """A mix-in class containing all the reports and stats manipulation - related methods for the main lint class + """A mix-in class containing all the reports and stats manipulation related methods + for the main lint class. """ def __init__(self) -> None: diff --git a/pylint/testutils/decorator.py b/pylint/testutils/decorator.py index 4cee70302f..21ddaaa8ef 100644 --- a/pylint/testutils/decorator.py +++ b/pylint/testutils/decorator.py @@ -12,8 +12,8 @@ def set_config(**kwargs): """Decorator for setting config values on a checker. - Passing the args and kwargs back to the test function itself - allows this decorator to be used on parametrized test cases. + Passing the args and kwargs back to the test function itself allows this decorator + to be used on parametrized test cases. """ def _wrapper(fun): diff --git a/pylint/testutils/functional/lint_module_output_update.py b/pylint/testutils/functional/lint_module_output_update.py index 0bd46fc0bf..0f76752ea8 100644 --- a/pylint/testutils/functional/lint_module_output_update.py +++ b/pylint/testutils/functional/lint_module_output_update.py @@ -14,7 +14,9 @@ class LintModuleOutputUpdate(LintModuleTest): - """Class to be used if expected output files should be updated instead of checked.""" + """Class to be used if expected output files should be updated instead of + checked. + """ class TestDialect(csv.excel): """Dialect used by the csv writer.""" diff --git a/pylint/testutils/lint_module_test.py b/pylint/testutils/lint_module_test.py index ed20ed8604..e121dc4813 100644 --- a/pylint/testutils/lint_module_test.py +++ b/pylint/testutils/lint_module_test.py @@ -259,7 +259,9 @@ def _check_output_text( expected_output: List[OutputLine], actual_output: List[OutputLine], ) -> None: - """This is a function because we want to be able to update the text in LintModuleOutputUpdate.""" + """This is a function because we want to be able to update the text in + LintModuleOutputUpdate. + """ assert expected_output == actual_output, self.error_msg_for_unequal_output( expected_output, actual_output ) diff --git a/pylint/testutils/output_line.py b/pylint/testutils/output_line.py index e851ccfbe9..0ca79dc6a0 100644 --- a/pylint/testutils/output_line.py +++ b/pylint/testutils/output_line.py @@ -93,8 +93,9 @@ def from_msg(cls, msg: Message, check_endline: bool = True) -> "OutputLine": @staticmethod def _get_column(column: str) -> int: - """Handle column numbers except for python < 3.8. The ast parser in those versions doesn't - return them. + """Handle column numbers except for python < 3.8. + + The ast parser in those versions doesn't return them. """ if not PY38_PLUS: # We check the column only for the new better ast parser introduced in python 3.8 @@ -103,8 +104,8 @@ def _get_column(column: str) -> int: @staticmethod def _get_py38_none_value(value: T, check_endline: bool) -> Optional[T]: - """Used to make end_line and end_column None as indicated by our version compared to - `min_pyver_end_position`. + """Used to make end_line and end_column None as indicated by our version + compared to `min_pyver_end_position`. 
""" if not check_endline: return None # pragma: no cover @@ -114,8 +115,8 @@ def _get_py38_none_value(value: T, check_endline: bool) -> Optional[T]: def from_csv( cls, row: Union[Sequence[str], str], check_endline: bool = True ) -> "OutputLine": - """Create an OutputLine from a comma separated list (the functional tests expected - output .txt files). + """Create an OutputLine from a comma separated list (the functional tests + expected output .txt files). """ if isinstance(row, str): row = row.split(",") @@ -166,9 +167,7 @@ def from_csv( raise MalformedOutputLineException(row, e) from e def to_csv(self) -> Tuple[str, str, str, str, str, str, str, str]: - """Convert an OutputLine to a tuple of string to be written by a - csv-writer. - """ + """Convert an OutputLine to a tuple of string to be written by a csv- writer.""" return ( str(self.symbol), str(self.lineno), diff --git a/pylint/testutils/primer.py b/pylint/testutils/primer.py index 558ad582e0..8113c44bee 100644 --- a/pylint/testutils/primer.py +++ b/pylint/testutils/primer.py @@ -72,11 +72,10 @@ def pylint_args(self) -> List[str]: def lazy_clone(self) -> None: # pragma: no cover """Concatenates the target directory and clones the file. - Not expected to be tested as the primer won't work if it doesn't. - It's tested in the continuous integration primers, only the coverage - is not calculated on everything. If lazy clone breaks for local use - we'll probably notice because we'll have a fatal when launching the - primer locally. + Not expected to be tested as the primer won't work if it doesn't. It's tested in + the continuous integration primers, only the coverage is not calculated on + everything. If lazy clone breaks for local use we'll probably notice because + we'll have a fatal when launching the primer locally. """ logging.info("Lazy cloning %s", self.url) if not self.clone_directory.exists(): diff --git a/pylint/testutils/pyreverse.py b/pylint/testutils/pyreverse.py index ac72def41c..34d66d2177 100644 --- a/pylint/testutils/pyreverse.py +++ b/pylint/testutils/pyreverse.py @@ -8,6 +8,7 @@ # A NamedTuple is not possible as some tests need to modify attributes during the test. class PyreverseConfig: # pylint: disable=too-many-instance-attributes, too-many-arguments """Holds the configuration options for Pyreverse. + The default values correspond to the defaults of the options' parser. """ diff --git a/pylint/utils/file_state.py b/pylint/utils/file_state.py index 654cb725b9..52761471f8 100644 --- a/pylint/utils/file_state.py +++ b/pylint/utils/file_state.py @@ -61,8 +61,8 @@ def _collect_block_lines( node: nodes.NodeNG, msg_state: MessageStateDict, ) -> None: - """Recursively walk (depth first) AST to collect block level options - line numbers. + """Recursively walk (depth first) AST to collect block level options line + numbers. """ for child in node.get_children(): self._collect_block_lines(msgs_store, child, msg_state) @@ -135,8 +135,8 @@ def handle_ignored_message( """Report an ignored message. state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG, - depending on whether the message was disabled locally in the module, - or globally. + depending on whether the message was disabled locally in the module, or + globally. 
""" if state_scope == MSG_STATE_SCOPE_MODULE: assert isinstance(line, int) # should always be int inside module scope diff --git a/pylint/utils/linterstats.py b/pylint/utils/linterstats.py index 54b98d533e..0cf12ad43b 100644 --- a/pylint/utils/linterstats.py +++ b/pylint/utils/linterstats.py @@ -152,7 +152,9 @@ def __str__(self) -> str: {self.percent_duplicated_lines}""" def init_single_module(self, module_name: str) -> None: - """Use through PyLinter.set_current_module so PyLinter.current_name is consistent.""" + """Use through PyLinter.set_current_module so PyLinter.current_name is + consistent. + """ self.by_module[module_name] = ModuleStats( convention=0, error=0, fatal=0, info=0, refactor=0, statement=0, warning=0 ) @@ -303,7 +305,9 @@ def reset_message_count(self) -> None: def merge_stats(stats: List[LinterStats]): - """Used to merge multiple stats objects into a new one when pylint is run in parallel mode.""" + """Used to merge multiple stats objects into a new one when pylint is run in + parallel mode. + """ merged = LinterStats() for stat in stats: merged.bad_names["argument"] += stat.bad_names["argument"] diff --git a/script/bump_changelog.py b/script/bump_changelog.py index 6e25719d4c..af32b3cdfd 100644 --- a/script/bump_changelog.py +++ b/script/bump_changelog.py @@ -1,7 +1,9 @@ # ORIGINAL here: https://github.com/PyCQA/astroid/blob/main/script/bump_changelog.py # DO NOT MODIFY DIRECTLY -"""This script permits to upgrade the changelog in astroid or pylint when releasing a version.""" +"""This script permits to upgrade the changelog in astroid or pylint when releasing a +version. +""" # pylint: disable=logging-fstring-interpolation import argparse import enum diff --git a/script/fix_documentation.py b/script/fix_documentation.py index 0fc4e347e2..4039968f16 100644 --- a/script/fix_documentation.py +++ b/script/fix_documentation.py @@ -1,4 +1,7 @@ -"""Small script to fix various issues with the documentation. Used by pre-commit.""" +"""Small script to fix various issues with the documentation. + +Used by pre-commit. +""" import argparse import re import sys diff --git a/tests/checkers/unittest_design.py b/tests/checkers/unittest_design.py index b60106590e..09d4975bf2 100644 --- a/tests/checkers/unittest_design.py +++ b/tests/checkers/unittest_design.py @@ -24,8 +24,8 @@ class TestDesignChecker(CheckerTestCase): max_parents=1, ) def test_too_many_ancestors_ignored_parents_are_skipped(self) -> None: - """Make sure that classes listed in ``ignored-parents`` aren't counted - by the too-many-ancestors message. + """Make sure that classes listed in ``ignored-parents`` aren't counted by the + too- many-ancestors message. """ node = astroid.extract_node( @@ -56,6 +56,7 @@ def test_exclude_too_few_methods_with_value(self) -> None: def test_ignore_paths_with_no_value(self) -> None: """Test exclude-too-few-public-methods option with no value. + Compare against actual list to see if validator works. 
""" options = get_global_option(self.checker, "exclude-too-few-public-methods") diff --git a/tests/checkers/unittest_format.py b/tests/checkers/unittest_format.py index 30972cb5c7..6e0025a9f5 100644 --- a/tests/checkers/unittest_format.py +++ b/tests/checkers/unittest_format.py @@ -108,8 +108,8 @@ def testCheckKeywordParensHandlesUnnecessaryParens(self) -> None: self.checker._check_keyword_parentheses(_tokenize_str(code), offset) def testNoSuperfluousParensWalrusOperatorIf(self) -> None: - """Parenthesis change the meaning of assignment in the walrus operator - and so are not always superfluous: + """Parenthesis change the meaning of assignment in the walrus operator and so + are not always superfluous: """ cases = [ ("if (odd := is_odd(i))\n"), @@ -164,8 +164,7 @@ class TestCheckSpace(CheckerTestCase): def test_encoding_token(self) -> None: """Make sure the encoding token doesn't change the checker's behavior. - _tokenize_str doesn't produce an encoding token, but - reading a file does + _tokenize_str doesn't produce an encoding token, but reading a file does """ with self.assertNoMessages(): encoding_token = tokenize.TokenInfo( @@ -178,8 +177,8 @@ def test_encoding_token(self) -> None: def test_disable_global_option_end_of_line() -> None: - """Test for issue with disabling tokenizer messages - that extend beyond the scope of the ast tokens + """Test for issue with disabling tokenizer messages that extend beyond the scope of + the ast tokens. """ file_ = tempfile.NamedTemporaryFile("w", delete=False) with file_: diff --git a/tests/checkers/unittest_stdlib.py b/tests/checkers/unittest_stdlib.py index 88f5c79229..cc8991b9df 100644 --- a/tests/checkers/unittest_stdlib.py +++ b/tests/checkers/unittest_stdlib.py @@ -43,9 +43,10 @@ class TestStdlibChecker(CheckerTestCase): def test_deprecated_no_qname_on_unexpected_nodes(self) -> None: """Test that we don't crash on nodes which don't have a qname method. - While this test might seem weird since it uses a transform, it's actually testing a crash - that happened in production, but there was no way to retrieve the code for which this - occurred (how an AssignAttr got to be the result of a function inference beats me...) + While this test might seem weird since it uses a transform, it's actually + testing a crash that happened in production, but there was no way to retrieve + the code for which this occurred (how an AssignAttr got to be the result of a + function inference beats me...) """ def infer_func( diff --git a/tests/checkers/unittest_variables.py b/tests/checkers/unittest_variables.py index f42c3fbae6..3527034039 100644 --- a/tests/checkers/unittest_variables.py +++ b/tests/checkers/unittest_variables.py @@ -182,8 +182,7 @@ class MyObject(object): self.walk(node) def test_nested_lambda(self) -> None: - """Make sure variables from parent lambdas - aren't noted as undefined + """Make sure variables from parent lambdas aren't noted as undefined. 
https://github.com/PyCQA/pylint/issues/760 """ @@ -197,9 +196,7 @@ def test_nested_lambda(self) -> None: @set_config(ignored_argument_names=re.compile("arg")) def test_ignored_argument_names_no_message(self) -> None: - """Make sure is_ignored_argument_names properly ignores - function arguments - """ + """Make sure is_ignored_argument_names properly ignores function arguments.""" node = astroid.parse( """ def fooby(arg): diff --git a/tests/config/test_functional_config_loading.py b/tests/config/test_functional_config_loading.py index 1937435f73..3f571aa338 100644 --- a/tests/config/test_functional_config_loading.py +++ b/tests/config/test_functional_config_loading.py @@ -2,8 +2,8 @@ # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE """This launches the configuration functional tests. This permits to test configuration -files by providing a file with the appropriate extension in the ``tests/config/functional`` -directory. +files by providing a file with the appropriate extension in the +``tests/config/functional`` directory. Let's say you have a regression_list_crash.toml file to test. Then, if there is an error in the conf, add ``regression_list_crash.out`` alongside your file with the expected output of pylint in it. Use diff --git a/tests/config/unittest_config.py b/tests/config/unittest_config.py index fd56a3f928..80f895000d 100644 --- a/tests/config/unittest_config.py +++ b/tests/config/unittest_config.py @@ -76,8 +76,8 @@ def test__regexp_csv_validator_invalid() -> None: class TestPyLinterOptionSetters(CheckerTestCase): - """Class to check the set_config decorator and get_global_option util - for options declared in PyLinter. + """Class to check the set_config decorator and get_global_option util for options + declared in PyLinter. """ class Checker(BaseChecker): @@ -99,6 +99,7 @@ def test_ignore_paths_with_value(self) -> None: def test_ignore_paths_with_no_value(self) -> None: """Test ignore-paths option with no value. + Compare against actual list to see if validator works. """ options = get_global_option(self.checker, "ignore-paths") diff --git a/tests/extensions/test_check_docs_utils.py b/tests/extensions/test_check_docs_utils.py index 2fa5719fd5..77b7938681 100644 --- a/tests/extensions/test_check_docs_utils.py +++ b/tests/extensions/test_check_docs_utils.py @@ -12,7 +12,9 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Unit tests for utils functions in :mod:`pylint.extensions._check_docs_utils`.""" +"""Unit tests for the pylint checkers in :mod:`pylint.extensions.check_docs`, in +particular the parameter documentation checker `DocstringChecker` +""" import astroid import pytest diff --git a/tests/primer/test_primer_external.py b/tests/primer/test_primer_external.py index 193eabf1d8..d9d4136ccc 100644 --- a/tests/primer/test_primer_external.py +++ b/tests/primer/test_primer_external.py @@ -70,10 +70,9 @@ def test_primer_external_packages_no_crash_batch_two( def _primer_test(package: PackageToLint, caplog: LogCaptureFixture) -> None: """Runs pylint over external packages to check for crashes and fatal messages. - We only check for crashes (bit-encoded exit code 32) and fatal messages - (bit-encoded exit code 1). We assume that these external repositories do not - have any fatal errors in their code so that any fatal errors are pylint false - positives + We only check for crashes (bit-encoded exit code 32) and fatal messages (bit- + encoded exit code 1). 
We assume that these external repositories do not have any + fatal errors in their code so that any fatal errors are pylint false positives """ caplog.set_level(logging.INFO) package.lazy_clone() diff --git a/tests/primer/test_primer_stdlib.py b/tests/primer/test_primer_stdlib.py index 824c1feac9..72f5cdfabf 100644 --- a/tests/primer/test_primer_stdlib.py +++ b/tests/primer/test_primer_stdlib.py @@ -46,7 +46,9 @@ def _patch_stdout(out): def test_primer_stdlib_no_crash( test_module_location: str, test_module_name: str, capsys: CaptureFixture ) -> None: - """Test that pylint does not produce any crashes or fatal errors on stdlib modules.""" + """Test that pylint does not produce any crashes or fatal errors on stdlib + modules. + """ __tracebackhide__ = True # pylint: disable=unused-variable os.chdir(test_module_location) with _patch_stdout(io.StringIO()): diff --git a/tests/pyreverse/test_main.py b/tests/pyreverse/test_main.py index 01f3ccc099..a40121d499 100644 --- a/tests/pyreverse/test_main.py +++ b/tests/pyreverse/test_main.py @@ -25,6 +25,7 @@ def setup_path(request) -> Iterator: @pytest.mark.usefixtures("setup_path") def test_project_root_in_sys_path(): """Test the context manager adds the project root directory to sys.path. + This should happen when pyreverse is run from any directory """ with fix_import_path([TEST_DATA_DIR]): diff --git a/tests/pyreverse/test_utils.py b/tests/pyreverse/test_utils.py index f571b18473..60c3a77c18 100644 --- a/tests/pyreverse/test_utils.py +++ b/tests/pyreverse/test_utils.py @@ -84,8 +84,8 @@ class A: @patch("pylint.pyreverse.utils.get_annotation") @patch("astroid.node_classes.NodeNG.infer", side_effect=astroid.InferenceError) def test_infer_node_1(mock_infer: Any, mock_get_annotation: Any) -> None: - """Return set() when astroid.InferenceError is raised and an annotation has - not been returned + """Return set() when astroid.InferenceError is raised and an annotation has not been + returned. """ mock_get_annotation.return_value = None node = astroid.extract_node("a: str = 'mystr'") @@ -97,8 +97,8 @@ def test_infer_node_1(mock_infer: Any, mock_get_annotation: Any) -> None: @patch("pylint.pyreverse.utils.get_annotation") @patch("astroid.node_classes.NodeNG.infer") def test_infer_node_2(mock_infer: Any, mock_get_annotation: Any) -> None: - """Return set(node.infer()) when InferenceError is not raised and an - annotation has not been returned + """Return set(node.infer()) when InferenceError is not raised and an annotation has + not been returned. """ mock_get_annotation.return_value = None node = astroid.extract_node("a: str = 'mystr'") @@ -108,8 +108,8 @@ def test_infer_node_2(mock_infer: Any, mock_get_annotation: Any) -> None: def test_infer_node_3() -> None: - """Return a set containing a nodes.ClassDef object when the attribute - has a type annotation + """Return a set containing a nodes.ClassDef object when the attribute has a type + annotation. 
""" node = astroid.extract_node( """ @@ -127,9 +127,7 @@ def __init__(self, component: Component): def test_infer_node_4() -> None: - """Verify the label for an argument with a typehint of the type - nodes.Subscript - """ + """Verify the label for an argument with a typehint of the type nodes.Subscript.""" node = astroid.extract_node( """ class MyClass: diff --git a/tests/test_import_graph.py b/tests/test_import_graph.py index 9a4ea59a63..bede4c903a 100644 --- a/tests/test_import_graph.py +++ b/tests/test_import_graph.py @@ -73,7 +73,9 @@ def test_dependencies_graph(dest: str) -> None: any(shutil.which(x) for x in ("dot", "gv")), reason="dot or gv is installed" ) def test_missing_graphviz(filename: str) -> None: - """Raises if graphviz is not installed, and defaults to png if no extension given.""" + """Raises if graphviz is not installed, and defaults to png if no extension + given. + """ with pytest.raises(RuntimeError, match=r"Cannot generate `graph\.png`.*"): imports._dependencies_graph(filename, {"a": {"b", "c"}, "b": {"c"}}) diff --git a/tests/testutils/data/init_hook.py b/tests/testutils/data/init_hook.py index f16492d3aa..f28633ea86 100644 --- a/tests/testutils/data/init_hook.py +++ b/tests/testutils/data/init_hook.py @@ -1,3 +1,3 @@ -"""This file should never be tested as the init-hook in the configuration -file prevents the test runner from getting here. +"""This file should never be tested as the init-hook in the configuration file prevents +the test runner from getting here. """ diff --git a/tests/testutils/test_output_line.py b/tests/testutils/test_output_line.py index 48ae107213..9359fc165a 100644 --- a/tests/testutils/test_output_line.py +++ b/tests/testutils/test_output_line.py @@ -89,8 +89,8 @@ def test_output_line_from_message(message: Callable) -> None: @pytest.mark.parametrize("confidence", [HIGH, INFERENCE]) def test_output_line_to_csv(confidence: Confidence, message: Callable) -> None: - """Test that the OutputLine NamedTuple is instantiated correctly with from_msg - and then converted to csv. + """Test that the OutputLine NamedTuple is instantiated correctly with from_msg and + then converted to csv. """ output_line = OutputLine.from_msg(message(confidence), True) csv = output_line.to_csv() @@ -153,6 +153,7 @@ def test_output_line_from_csv_deprecated( confidence: Optional[str], expected_confidence: str ) -> None: """Test that the OutputLine NamedTuple is instantiated correctly with from_csv. + Test OutputLine's of length 5 or 6. """ if confidence: @@ -185,6 +186,7 @@ def test_output_line_from_csv_deprecated( def test_output_line_from_csv() -> None: """Test that the OutputLine NamedTuple is instantiated correctly with from_csv. + Test OutputLine of length 8. """ proper_csv = [ diff --git a/tests/unittest_reporting.py b/tests/unittest_reporting.py index d6cb182206..b414c1c26f 100644 --- a/tests/unittest_reporting.py +++ b/tests/unittest_reporting.py @@ -93,8 +93,9 @@ def test_template_option_end_line(linter) -> None: def test_template_option_non_existing(linter) -> None: """Test the msg-template option with non-existent options. 
- This makes sure that this option remains backwards compatible as new - parameters do not break on previous versions + + This makes sure that this option remains backwards compatible as new parameters do + not break on previous versions """ output = StringIO() linter.reporter.out = output From e8aeaf3ad0cbcc40f97e5dc03c7d06d7bc6235b0 Mon Sep 17 00:00:00 2001 From: Pierre Sassoulas Date: Thu, 10 Feb 2022 21:27:08 +0100 Subject: [PATCH 2/2] More automated (curated) docformatter formatting --- doc/exts/pylint_messages.py | 9 +- examples/custom_raw.py | 4 +- examples/deprecation_checker.py | 10 +- pylint/__init__.py | 12 +- pylint/checkers/__init__.py | 7 +- pylint/checkers/base.py | 58 +++--- pylint/checkers/base_checker.py | 6 +- pylint/checkers/classes/class_checker.py | 106 ++++++----- .../classes/special_methods_checker.py | 8 +- pylint/checkers/deprecated.py | 7 +- pylint/checkers/design_analysis.py | 17 +- pylint/checkers/ellipsis_checker.py | 9 +- pylint/checkers/exceptions.py | 5 +- pylint/checkers/format.py | 8 +- pylint/checkers/imports.py | 12 +- pylint/checkers/logging.py | 8 +- pylint/checkers/misc.py | 8 +- pylint/checkers/modified_iterating_checker.py | 4 +- pylint/checkers/newstyle.py | 3 +- pylint/checkers/non_ascii_names.py | 4 +- pylint/checkers/raw_metrics.py | 3 +- .../implicit_booleaness_checker.py | 12 +- .../refactoring/recommendation_checker.py | 8 +- .../refactoring/refactoring_checker.py | 74 +++++--- pylint/checkers/similar.py | 98 ++++++---- pylint/checkers/spelling.py | 13 +- pylint/checkers/strings.py | 7 +- pylint/checkers/typecheck.py | 51 ++--- pylint/checkers/unicode.py | 8 +- pylint/checkers/unsupported_version.py | 12 +- pylint/checkers/utils.py | 176 ++++++++++-------- pylint/checkers/variables.py | 114 +++++++----- pylint/config/config_initialization.py | 4 +- pylint/config/configuration_mixin.py | 4 +- pylint/config/find_default_config_files.py | 4 +- pylint/config/option_manager_mixin.py | 10 +- pylint/epylint.py | 4 +- pylint/exceptions.py | 8 +- pylint/extensions/_check_docs_utils.py | 4 +- pylint/extensions/broad_try_clause.py | 5 +- pylint/extensions/code_style.py | 8 +- pylint/extensions/comparetozero.py | 6 +- pylint/extensions/confusing_elif.py | 4 +- pylint/extensions/docparams.py | 20 +- pylint/extensions/emptystring.py | 6 +- pylint/extensions/for_any_all.py | 6 +- pylint/extensions/mccabe.py | 4 +- pylint/extensions/overlapping_exceptions.py | 6 +- pylint/extensions/set_membership.py | 4 +- pylint/extensions/typing.py | 4 +- pylint/lint/expand_modules.py | 8 +- pylint/lint/parallel.py | 17 +- pylint/lint/pylinter.py | 84 +++++---- pylint/lint/run.py | 11 +- pylint/lint/utils.py | 10 +- pylint/message/message_definition.py | 4 +- pylint/message/message_definition_store.py | 10 +- pylint/message/message_id_store.py | 16 +- pylint/pyreverse/diadefslib.py | 4 +- pylint/pyreverse/diagrams.py | 2 +- pylint/pyreverse/dot_printer.py | 4 +- pylint/pyreverse/inspector.py | 1 + pylint/pyreverse/main.py | 2 +- pylint/pyreverse/plantuml_printer.py | 4 +- pylint/pyreverse/printer.py | 9 +- pylint/pyreverse/utils.py | 17 +- pylint/pyreverse/vcg_printer.py | 14 +- pylint/reporters/base_reporter.py | 11 +- pylint/reporters/multi_reporter.py | 7 +- pylint/reporters/reports_handler_mix_in.py | 4 +- pylint/reporters/text.py | 12 +- pylint/reporters/ureports/base_writer.py | 17 +- pylint/reporters/ureports/text_writer.py | 4 +- pylint/testutils/configuration_test.py | 12 +- pylint/testutils/decorator.py | 4 +- .../functional/lint_module_output_update.py | 4 +- 
pylint/testutils/lint_module_test.py | 3 +- pylint/testutils/output_line.py | 16 +- pylint/testutils/primer.py | 9 +- pylint/testutils/pyreverse.py | 3 +- pylint/typing.py | 4 +- pylint/utils/__init__.py | 4 +- pylint/utils/file_state.py | 10 +- pylint/utils/linterstats.py | 8 +- pylint/utils/utils.py | 22 ++- script/bump_changelog.py | 4 +- tests/benchmark/test_baseline_benchmarks.py | 44 +++-- tests/checkers/unittest_format.py | 11 +- tests/checkers/unittest_non_ascii_name.py | 8 +- tests/checkers/unittest_stdlib.py | 9 +- tests/checkers/unittest_typecheck.py | 4 +- tests/checkers/unittest_unicode/__init__.py | 7 +- .../unittest_unicode/unittest_bad_chars.py | 19 +- .../unittest_bidirectional_unicode.py | 5 +- .../unittest_unicode/unittest_functions.py | 4 +- .../unittest_invalid_encoding.py | 4 +- tests/checkers/unittest_variables.py | 4 +- .../config/test_functional_config_loading.py | 4 +- tests/config/unittest_config.py | 4 +- tests/extensions/test_check_docs_utils.py | 4 +- tests/lint/test_utils.py | 4 +- tests/lint/unittest_lint.py | 4 +- tests/primer/test_primer_external.py | 10 +- tests/pyreverse/test_diadefs.py | 4 +- tests/pyreverse/test_utils.py | 16 +- tests/test_check_parallel.py | 33 +++- tests/test_func.py | 2 +- tests/test_regr.py | 6 +- tests/test_self.py | 23 ++- tests/testutils/data/init_hook.py | 4 +- tests/testutils/test_output_line.py | 14 +- tests/testutils/test_package_to_lint.py | 4 +- tests/unittest_reporting.py | 4 +- 113 files changed, 950 insertions(+), 676 deletions(-) diff --git a/doc/exts/pylint_messages.py b/doc/exts/pylint_messages.py index 1ca3bdb88a..d72a404884 100644 --- a/doc/exts/pylint_messages.py +++ b/doc/exts/pylint_messages.py @@ -50,9 +50,11 @@ def _register_all_checkers_and_extensions(linter: PyLinter) -> None: def _get_all_messages( linter: PyLinter, ) -> Tuple[MessagesDict, OldMessagesDict]: - """Get all messages registered to a linter and return a dictionary indexed by message - type. - Also return a dictionary of old message and the new messages they can be mapped to. + """Get all messages registered to a linter and return a dictionary indexed + by message type. + + Also return a dictionary of old message and the new messages they + can be mapped to. """ messages_dict: MessagesDict = { "fatal": [], @@ -202,6 +204,7 @@ def _write_redirect_pages(old_messages: OldMessagesDict) -> None: # pylint: disable-next=unused-argument def build_messages_pages(app: Optional[Sphinx]) -> None: """Overwrite messages files by printing the documentation to a stream. + Documentation is written in ReST format. """ # Create linter, register all checkers and extensions and get all messages diff --git a/examples/custom_raw.py b/examples/custom_raw.py index 365e9b7fa0..061ecef467 100644 --- a/examples/custom_raw.py +++ b/examples/custom_raw.py @@ -10,8 +10,8 @@ class MyRawChecker(BaseChecker): - """Check for line continuations with '\' instead of using triple - quoted string or parenthesis + """Check for line continuations with '\' instead of using triple quoted + string or parenthesis. """ __implements__ = IRawChecker diff --git a/examples/deprecation_checker.py b/examples/deprecation_checker.py index d336417f47..7229c1f70e 100644 --- a/examples/deprecation_checker.py +++ b/examples/deprecation_checker.py @@ -1,6 +1,6 @@ -"""Example checker detecting deprecated functions/methods. 
Following example searches -for usages of deprecated function `deprecated_function` and deprecated method -`MyClass.deprecated_method` from module mymodule: +"""Example checker detecting deprecated functions/methods. Following example +searches for usages of deprecated function `deprecated_function` and deprecated +method `MyClass.deprecated_method` from module mymodule: .. code-block:: console $ cat mymodule.py @@ -59,8 +59,8 @@ class DeprecationChecker(DeprecatedMixin, BaseChecker): name = "deprecated" def deprecated_methods(self) -> Set[str]: - """Callback method called by DeprecatedMixin for every method/function found in - the code. + """Callback method called by DeprecatedMixin for every method/function + found in the code. Returns: collections.abc.Container of deprecated function/method names. diff --git a/pylint/__init__.py b/pylint/__init__.py index 02df46054e..cefa1ce87a 100644 --- a/pylint/__init__.py +++ b/pylint/__init__.py @@ -21,7 +21,8 @@ def run_pylint(argv: Optional[Sequence[str]] = None): """Run pylint. - argv can be a sequence of strings normally supplied as arguments on the command line + argv can be a sequence of strings normally supplied as arguments on + the command line """ from pylint.lint import Run as PylintRun @@ -34,7 +35,8 @@ def run_pylint(argv: Optional[Sequence[str]] = None): def run_epylint(argv: Optional[Sequence[str]] = None): """Run epylint. - argv can be a list of strings normally supplied as arguments on the command line + argv can be a list of strings normally supplied as arguments on the + command line """ from pylint.epylint import Run as EpylintRun @@ -44,7 +46,8 @@ def run_epylint(argv: Optional[Sequence[str]] = None): def run_pyreverse(argv: Optional[Sequence[str]] = None): """Run pyreverse. - argv can be a sequence of strings normally supplied as arguments on the command line + argv can be a sequence of strings normally supplied as arguments on + the command line """ from pylint.pyreverse.main import Run as PyreverseRun @@ -54,7 +57,8 @@ def run_pyreverse(argv: Optional[Sequence[str]] = None): def run_symilar(argv: Optional[Sequence[str]] = None): """Run symilar. - argv can be a sequence of strings normally supplied as arguments on the command line + argv can be a sequence of strings normally supplied as arguments on + the command line """ from pylint.checkers.similar import Run as SimilarRun diff --git a/pylint/checkers/__init__.py b/pylint/checkers/__init__.py index e99560faa3..5c9a9d600f 100644 --- a/pylint/checkers/__init__.py +++ b/pylint/checkers/__init__.py @@ -53,7 +53,6 @@ The raw_metrics checker has no number associated since it doesn't emit any messages nor reports. XXX not true, emit a 07 report ! - """ import sys @@ -75,9 +74,9 @@ def table_lines_from_stats( old_stats: Optional[LinterStats], stat_type: Literal["duplicated_lines", "message_types"], ) -> List[str]: - """Get values listed in from and , - and return a formatted list of values, designed to be given to a - ureport.Table object + """Get values listed in from and , and return + a formatted list of values, designed to be given to a ureport.Table + object. 
""" lines: List[str] = [] if stat_type == "duplicated_lines": diff --git a/pylint/checkers/base.py b/pylint/checkers/base.py index e2a3ece496..050118d382 100644 --- a/pylint/checkers/base.py +++ b/pylint/checkers/base.py @@ -101,9 +101,9 @@ class NamingStyle: - """It may seem counterintuitive that single naming style has multiple "accepted" - forms of regular expressions, but we need to special-case stuff like dunder names - in method names. + """It may seem counterintuitive that single naming style has multiple + "accepted" forms of regular expressions, but we need to special-case stuff + like dunder names in method names. """ ANY: Pattern[str] = re.compile(".*") @@ -231,8 +231,9 @@ class AnyStyle(NamingStyle): def _redefines_import(node): - """Detect that the given node (AssignName) is inside an - exception handler and redefines an import from the tryexcept body. + """Detect that the given node (AssignName) is inside an exception handler + and redefines an import from the tryexcept body. + Returns True if the node redefines an import, False otherwise. """ current = node @@ -267,7 +268,7 @@ def in_loop(node: nodes.NodeNG) -> bool: def in_nested_list(nested_list, obj): """Return true if the object is an element of or of a nested - list + list. """ for elmt in nested_list: if isinstance(elmt, (list, tuple)): @@ -337,8 +338,8 @@ def _is_multi_naming_match(match, node_type, confidence): def _get_properties(config): """Returns a tuple of property classes and names. - Property classes are fully qualified, such as 'abc.abstractproperty' and - property names are the actual names, such as 'abstract_property'. + Property classes are fully qualified, i.e. 'abc.abstractproperty' + and property names are the actual names, i.e. 'abstract_property'. """ property_classes = {BUILTIN_PROPERTY} property_names = set() # Not returning 'property', it has its own check. @@ -390,8 +391,8 @@ def _determine_function_name_type(node: nodes.FunctionDef, config=None): def _has_abstract_methods(node): """Determine if the given `node` has abstract methods. - The methods should be made abstract by decorating them - with `abc` decorators. + The methods should be made abstract by decorating them with `abc` + decorators. """ return len(utils.unimplemented_abstract_methods(node)) > 0 @@ -784,9 +785,7 @@ def visit_nonlocal(self, node: nodes.Nonlocal) -> None: @utils.check_messages("abstract-class-instantiated") def visit_call(self, node: nodes.Call) -> None: - """Check instantiating abstract class with - abc.ABCMeta as metaclass. - """ + """Check instantiating abstract class with abc.ABCMeta as metaclass.""" for inferred in infer_all(node.func): self._check_inferred_class_is_abstract(inferred, node) @@ -1167,8 +1166,8 @@ def visit_module(self, _: nodes.Module) -> None: self.linter.stats.node_count["module"] += 1 def visit_classdef(self, _: nodes.ClassDef) -> None: - """Check module name, docstring and redefinition - increment branch counter + """Check module name, docstring and redefinition increment branch + counter. """ self.linter.stats.node_count["klass"] += 1 @@ -1306,8 +1305,8 @@ def visit_lambda(self, node: nodes.Lambda) -> None: @utils.check_messages("dangerous-default-value") def visit_functiondef(self, node: nodes.FunctionDef) -> None: - """Check function name, docstring, arguments, redefinition, - variable names, max locals + """Check function name, docstring, arguments, redefinition, variable + names, max locals. 
""" if node.is_method(): self.linter.stats.node_count["method"] += 1 @@ -1370,8 +1369,8 @@ def visit_return(self, node: nodes.Return) -> None: @utils.check_messages("unreachable") def visit_continue(self, node: nodes.Continue) -> None: - """Check is the node has a right sibling (if so, that's some unreachable - code) + """Check is the node has a right sibling (if so, that's some + unreachable code) """ self._check_unreachable(node) @@ -1389,8 +1388,8 @@ def visit_break(self, node: nodes.Break) -> None: @utils.check_messages("unreachable") def visit_raise(self, node: nodes.Raise) -> None: - """Check if the node has a right sibling (if so, that's some unreachable - code) + """Check if the node has a right sibling (if so, that's some + unreachable code) """ self._check_unreachable(node) @@ -1419,7 +1418,8 @@ def _check_misplaced_format_function(self, call_node): "eval-used", "exec-used", "bad-reversed-sequence", "misplaced-format-function" ) def visit_call(self, node: nodes.Call) -> None: - """Visit a Call node -> check if this is not a disallowed builtin + """Visit a Call node -> check if this is not a disallowed builtin. + call and check for * or ** use """ self._check_misplaced_format_function(node) @@ -1494,8 +1494,9 @@ def _check_unreachable(self, node): def _check_not_in_finally(self, node, node_name, breaker_classes=()): """Check that a node is not inside a 'finally' clause of a 'try...finally' statement. - If we find a parent which type is in breaker_classes before - a 'try...finally' block we skip the whole check. + + If we find a parent which type is in breaker_classes before a + 'try...finally' block we skip the whole check. """ # if self._tryfinallys is empty, we're not an in try...finally block if not self._tryfinallys: @@ -2263,8 +2264,8 @@ def visit_pass(self, node: nodes.Pass) -> None: def _is_one_arg_pos_call(call): - """Is this a call with exactly 1 argument, - where that argument is positional? + """Is this a call with exactly 1 argument, where that argument is + positional? """ return isinstance(call, nodes.Call) and len(call.args) == 1 and not call.keywords @@ -2436,7 +2437,9 @@ def _is_nan(node) -> bool: ) def _check_literal_comparison(self, literal, node: nodes.Compare): - """Check if we compare to a literal, which is usually what we do not want to do.""" + """Check if we compare to a literal, which is usually what we do not + want to do. + """ is_other_literal = isinstance(literal, (nodes.List, nodes.Dict, nodes.Set)) is_const = False if isinstance(literal, nodes.Const): @@ -2450,6 +2453,7 @@ def _check_literal_comparison(self, literal, node: nodes.Compare): def _check_logical_tautology(self, node: nodes.Compare): """Check if identifier is compared against itself. + :param node: Compare node :Example: val = 786 diff --git a/pylint/checkers/base_checker.py b/pylint/checkers/base_checker.py index 10e990f935..9f1e0fb94c 100644 --- a/pylint/checkers/base_checker.py +++ b/pylint/checkers/base_checker.py @@ -67,8 +67,10 @@ def __repr__(self): return f"{status} '{self.name}' (responsible for '{msgs}')" def __str__(self): - """This might be incomplete because multiple class inheriting BaseChecker - can have the same name. Cf MessageHandlerMixIn.get_full_documentation() + """This might be incomplete because multiple class inheriting + BaseChecker can have the same name. 
+ + Cf MessageHandlerMixIn.get_full_documentation() """ return self.get_full_documentation( msgs=self.msgs, options=self.options_and_values(), reports=self.reports diff --git a/pylint/checkers/classes/class_checker.py b/pylint/checkers/classes/class_checker.py index dcde9231b0..b677ee9036 100644 --- a/pylint/checkers/classes/class_checker.py +++ b/pylint/checkers/classes/class_checker.py @@ -186,12 +186,12 @@ class _DefaultMissing: def _has_different_parameters_default_value(original, overridden): - """Check if original and overridden methods arguments have different default values. + """Check if original and overridden methods arguments have different + default values. - Return True if one of the overridden arguments has a default - value different from the default value of the original argument - If one of the method doesn't have argument (.args is None) - return False + Return True if one of the overridden arguments has a default value + different from the default value of the original argument If one of + the method doesn't have argument (.args is None) return False """ if original.args is None or overridden.args is None: return False @@ -266,7 +266,6 @@ def _different_parameters( * one of the methods is having variadics, while the other is not * they have different keyword only parameters. - """ output_messages = [] original_parameters = _positional_parameters(original) @@ -354,8 +353,10 @@ def _has_data_descriptor(cls, attr): def _called_in_methods(func, klass, methods): - """Check if the func was called in any of the given methods, - belonging to the *klass*. Returns True if so, False otherwise. + """Check if the func was called in any of the given methods, belonging to + the *klass*. + + Returns True if so, False otherwise. """ if not isinstance(func, nodes.FunctionDef): return False @@ -383,11 +384,10 @@ def _called_in_methods(func, klass, methods): def _is_attribute_property(name, klass): """Check if the given attribute *name* is a property in the given *klass*. - It will look for `property` calls or for functions - with the given name, decorated by `property` or `property` - subclasses. - Returns ``True`` if the name is a property in the given klass, - ``False`` otherwise. + It will look for `property` calls or for functions with the given + name, decorated by `property` or `property` subclasses. Returns + ``True`` if the name is a property in the given klass, ``False`` + otherwise. """ try: @@ -826,8 +826,8 @@ def _check_consistent_mro(self, node): pass def _check_proper_bases(self, node): - """Detect that a class inherits something which is not - a class or a type. + """Detect that a class inherits something which is not a class or a + type. """ for base in node.bases: ancestor = safe_infer(base) @@ -849,7 +849,9 @@ def _check_proper_bases(self, node): ) def _check_typing_final(self, node: nodes.ClassDef) -> None: - """Detect that a class does not subclass a class decorated with `typing.final`.""" + """Detect that a class does not subclass a class decorated with + `typing.final`. + """ if not self._py38_plus: return for base in node.bases: @@ -1173,13 +1175,14 @@ def visit_functiondef(self, node: nodes.FunctionDef) -> None: def _check_useless_super_delegation(self, function): """Check if the given function node is an useless method override. - We consider it *useless* if it uses the super() builtin, but having - nothing additional whatsoever than not implementing the method at all. 
- If the method uses super() to delegate an operation to the rest of the MRO, - and if the method called is the same as the current one, the arguments - passed to super() are the same as the parameters that were passed to - this method, then the method could be removed altogether, by letting - other implementation to take precedence. + We consider it *useless* if it uses the super() builtin, but + having nothing additional whatsoever than not implementing the + method at all. If the method uses super() to delegate an + operation to the rest of the MRO, and if the method called is + the same as the current one, the arguments passed to super() are + the same as the parameters that were passed to this method, then + the method could be removed altogether, by letting other + implementation to take precedence. """ if ( @@ -1370,7 +1373,9 @@ def _check_redefined_slots( slots_node: nodes.NodeNG, slots_list: List[nodes.NodeNG], ) -> None: - """Check if `node` redefines a slot which is defined in an ancestor class.""" + """Check if `node` redefines a slot which is defined in an ancestor + class. + """ slots_names: List[str] = [] for slot in slots_list: if isinstance(slot, nodes.Const): @@ -1428,8 +1433,8 @@ def _check_slots_elt(self, elt, node): def leave_functiondef(self, node: nodes.FunctionDef) -> None: """On method node, check if this method couldn't be a function. - ignore class, static and abstract methods, initializer, - methods overridden from a parent class. + ignore class, static and abstract methods, initializer, methods + overridden from a parent class. """ if node.is_method(): if node.args.args is not None: @@ -1455,10 +1460,11 @@ def leave_functiondef(self, node: nodes.FunctionDef) -> None: leave_asyncfunctiondef = leave_functiondef def visit_attribute(self, node: nodes.Attribute) -> None: - """Check if the getattr is an access to a class member - if so, register it. Also check for access to protected - class member from outside its class (but ignore __special__ - methods) + """Check if the getattr is an access to a class member if so, register + it. + + Also check for access to protected class member from outside its + class (but ignore __special__ methods) """ # Check self if self._uses_mandatory_method_param(node): @@ -1488,8 +1494,8 @@ def _check_invalid_class_object(self, node: nodes.AssignAttr) -> None: self.add_message("invalid-class-object", node=node) def _check_in_slots(self, node): - """Check that the given AssignAttr node - is defined in the class slots. + """Check that the given AssignAttr node is defined in the class + slots. """ inferred = safe_infer(node.expr) if not isinstance(inferred, astroid.Instance): @@ -1568,11 +1574,10 @@ def visit_assign(self, assign_node: nodes.Assign) -> None: def _check_classmethod_declaration(self, node): """Checks for uses of classmethod() or staticmethod(). - When a @classmethod or @staticmethod decorator should be used instead. - A message will be emitted only if the assignment is at a class scope - and only if the classmethod's argument belongs to the class where it - is defined. - `node` is an assign node. + When a @classmethod or @staticmethod decorator should be used + instead. A message will be emitted only if the assignment is at + a class scope and only if the classmethod's argument belongs to + the class where it is defined. `node` is an assign node. 
""" if not isinstance(node.value, nodes.Call): return @@ -1608,6 +1613,7 @@ def _check_protected_attribute_access(self, node: nodes.Attribute): """Given an attribute access node (set or get), check if attribute access is legitimate. Call _check_first_attr with node before calling this method. Valid cases are: + * self._attr in a method or cls._attr in a classmethod. Checked by _check_first_attr. * Klass._attr inside "Klass" class. @@ -1699,7 +1705,9 @@ def _check_protected_attribute_access(self, node: nodes.Attribute): @staticmethod def _is_called_inside_special_method(node: nodes.NodeNG) -> bool: - """Returns true if the node is located inside a special (aka dunder) method.""" + """Returns true if the node is located inside a special (aka dunder) + method. + """ frame_name = node.frame(future=True).name return frame_name and frame_name in PYMETHODS @@ -1722,7 +1730,9 @@ def _is_classmethod(func): @staticmethod def _is_inferred_instance(expr, klass): - """Check if the inferred value of the given *expr* is an instance of *klass*.""" + """Check if the inferred value of the given *expr* is an instance of + *klass*. + """ inferred = safe_infer(expr) if not isinstance(inferred, astroid.Instance): @@ -1732,7 +1742,8 @@ def _is_inferred_instance(expr, klass): @staticmethod def _is_class_attribute(name, klass): - """Check if the given attribute *name* is a class or instance member of the given *klass*. + """Check if the given attribute *name* is a class or instance member of + the given *klass*. Returns ``True`` if the name is a property in the given klass, ``False`` otherwise. @@ -1751,8 +1762,8 @@ def _is_class_attribute(name, klass): return False def visit_name(self, node: nodes.Name) -> None: - """Check if the name handle an access to a class member - if so, register it + """Check if the name handle an access to a class member if so, register + it. """ if self._first_attrs and ( node.name == self._first_attrs[-1] or not self._first_attrs[-1] @@ -1897,7 +1908,7 @@ def _check_first_arg_config(self, first, config, node, message, method_name): def _check_bases_classes(self, node): """Check that the given class node implements abstract methods from - base classes + base classes. """ def is_abstract(method): @@ -2086,14 +2097,16 @@ def _check_signature(self, method1, refmethod, class_type, cls): def _uses_mandatory_method_param(self, node): """Check that attribute lookup name use first attribute variable name. - Name is `self` for method, `cls` for classmethod and `mcs` for metaclass. + Name is `self` for method, `cls` for classmethod and `mcs` for + metaclass. """ return self._is_mandatory_method_param(node.expr) def _is_mandatory_method_param(self, node: nodes.NodeNG) -> bool: """Check if nodes.Name corresponds to first attribute variable name. - Name is `self` for method, `cls` for classmethod and `mcs` for metaclass. + Name is `self` for method, `cls` for classmethod and `mcs` for + metaclass. """ if self._first_attrs: first_attr = self._first_attrs[-1] @@ -2114,7 +2127,8 @@ def _ancestors_to_call( klass_node: nodes.ClassDef, method="__init__" ) -> Dict[nodes.ClassDef, bases.UnboundMethod]: """Return a dictionary where keys are the list of base classes providing - the queried method, and so that should/may be called from the method node + the queried method, and so that should/may be called from the method + node. 
""" to_call: Dict[nodes.ClassDef, bases.UnboundMethod] = {} for base_node in klass_node.ancestors(recurs=False): diff --git a/pylint/checkers/classes/special_methods_checker.py b/pylint/checkers/classes/special_methods_checker.py index 7005abd8d4..4dd7f6b224 100644 --- a/pylint/checkers/classes/special_methods_checker.py +++ b/pylint/checkers/classes/special_methods_checker.py @@ -23,8 +23,8 @@ def _safe_infer_call_result(node, caller, context=None): """Safely infer the return value of a function. - Returns None if inference failed or if there is some ambiguity (more than one node - has been inferred). Otherwise, returns inferred value. + Returns None if inference failed or if there is some ambiguity (more + than one node has been inferred). Otherwise, returns inferred value. """ try: inferit = node.infer_call_result(caller, context=context) @@ -43,8 +43,8 @@ def _safe_infer_call_result(node, caller, context=None): class SpecialMethodsChecker(BaseChecker): - """Checker which verifies that special methods - are implemented correctly. + """Checker which verifies that special methods are implemented + correctly. """ __implements__ = (IAstroidChecker,) diff --git a/pylint/checkers/deprecated.py b/pylint/checkers/deprecated.py index dc0713a89e..cadbb1c65b 100644 --- a/pylint/checkers/deprecated.py +++ b/pylint/checkers/deprecated.py @@ -22,6 +22,7 @@ class DeprecatedMixin(BaseChecker): """A mixin implementing logic for checking deprecated symbols. + A class implementing mixin must define "deprecated-method" Message. """ @@ -180,8 +181,10 @@ def check_deprecated_module(self, node, mod_path): self.add_message("deprecated-module", node=node, args=mod_path) def check_deprecated_method(self, node, inferred): - """Executes the checker for the given node. This method should - be called from the checker implementing this mixin. + """Executes the checker for the given node. + + This method should be called from the checker implementing this + mixin. """ # Reject nodes which aren't of interest to us. diff --git a/pylint/checkers/design_analysis.py b/pylint/checkers/design_analysis.py index 94569d4ea8..592f3283f1 100644 --- a/pylint/checkers/design_analysis.py +++ b/pylint/checkers/design_analysis.py @@ -225,7 +225,8 @@ def _is_exempt_from_public_methods(node: astroid.ClassDef) -> bool: def _count_boolean_expressions(bool_op): - """Counts the number of boolean expressions in BoolOp `bool_op` (recursive). + """Counts the number of boolean expressions in BoolOp `bool_op` + (recursive). example: a and (b or c or (d and e)) ==> 5 boolean expressions """ @@ -452,7 +453,9 @@ def _ignored_argument_names(self): "too-many-public-methods", ) def visit_classdef(self, node: nodes.ClassDef) -> None: - """Check size of inheritance hierarchy and number of instance attributes.""" + """Check size of inheritance hierarchy and number of instance + attributes. + """ parents = _get_parents( node, STDLIB_CLASSES_IGNORE_ANCESTOR.union(self.config.ignored_parents) ) @@ -526,8 +529,8 @@ def leave_classdef(self, node: nodes.ClassDef) -> None: "keyword-arg-before-vararg", ) def visit_functiondef(self, node: nodes.FunctionDef) -> None: - """Check function name, docstring, arguments, redefinition, - variable names, max locals + """Check function name, docstring, arguments, redefinition, variable + names, max locals. 
""" # init branch and returns counters self._returns.append(0) @@ -605,7 +608,7 @@ def visit_return(self, _: nodes.Return) -> None: def visit_default(self, node: nodes.NodeNG) -> None: """Default visit method -> increments the statements counter if - necessary + necessary. """ if node.is_statement: self._inc_all_stmts(1) @@ -637,8 +640,8 @@ def visit_if(self, node: nodes.If) -> None: self._inc_all_stmts(branches) def _check_boolean_expressions(self, node): - """Go through "if" node `node` and count its boolean expressions - if the 'if' node test is a BoolOp node + """Go through "if" node `node` and count its boolean expressions if the + 'if' node test is a BoolOp node. """ condition = node.test if not isinstance(condition, astroid.BoolOp): diff --git a/pylint/checkers/ellipsis_checker.py b/pylint/checkers/ellipsis_checker.py index f29ebed042..3faf2ce2bf 100644 --- a/pylint/checkers/ellipsis_checker.py +++ b/pylint/checkers/ellipsis_checker.py @@ -28,11 +28,12 @@ class EllipsisChecker(BaseChecker): @check_messages("unnecessary-ellipsis") def visit_const(self, node: nodes.Const) -> None: """Check if the ellipsis constant is used unnecessarily. + Emit a warning when: - - A line consisting of an ellipsis is preceded by a docstring. - - A statement exists in the same scope as the ellipsis. - For example: A function consisting of an ellipsis followed by a - return statement on the next line. + - A line consisting of an ellipsis is preceded by a docstring. + - A statement exists in the same scope as the ellipsis. + For example: A function consisting of an ellipsis followed by a + return statement on the next line. """ if ( node.pytype() == "builtins.Ellipsis" diff --git a/pylint/checkers/exceptions.py b/pylint/checkers/exceptions.py index 9960b8d32e..35d8de7629 100644 --- a/pylint/checkers/exceptions.py +++ b/pylint/checkers/exceptions.py @@ -58,9 +58,10 @@ def predicate(obj): def _annotated_unpack_infer(stmt, context=None): """Recursively generate nodes inferred by the given statement. + If the inferred value is a list or a tuple, recurse on the elements. - Returns an iterator which yields tuples in the format - ('original node', 'inferred node'). + Returns an iterator which yields tuples in the format ('original + node', 'inferred node'). """ if isinstance(stmt, (nodes.List, nodes.Tuple)): for elt in stmt.elts: diff --git a/pylint/checkers/format.py b/pylint/checkers/format.py index 799d97cbc5..0ee4360ca5 100644 --- a/pylint/checkers/format.py +++ b/pylint/checkers/format.py @@ -669,7 +669,9 @@ def _check_multi_statement_line(self, node, line): self._visited_lines[line] = 2 def check_line_ending(self, line: str, i: int) -> None: - """Check that the final newline is not missing and that there is no trailing whitespace.""" + """Check that the final newline is not missing and that there is no + trailing whitespace. + """ if not line.endswith("\n"): self.add_message("missing-final-newline", line=i) return @@ -715,7 +717,9 @@ def is_line_length_check_activated(pylint_pattern_match_object) -> bool: @staticmethod def specific_splitlines(lines: str) -> List[str]: - """Split lines according to universal newlines except those in a specific sets.""" + """Split lines according to universal newlines except those in a + specific sets. 
+ """ unsplit_ends = { "\v", "\x0b", diff --git a/pylint/checkers/imports.py b/pylint/checkers/imports.py index bd368a2c59..60997639c1 100644 --- a/pylint/checkers/imports.py +++ b/pylint/checkers/imports.py @@ -141,7 +141,7 @@ def _ignore_import_failure(node, modname, ignored_modules): def _make_tree_defs(mod_files_list): """Get a list of 2-uple (module, list_of_files_which_import_this_module), - it will return a dictionary to represent this as a tree + it will return a dictionary to represent this as a tree. """ tree_defs = {} for mod, files in mod_files_list: @@ -195,7 +195,7 @@ def _make_graph( filename: str, dep_info: Dict[str, Set[str]], sect: Section, gtype: str ): """Generate a dependencies graph and add some information about it in the - report's section + report's section. """ outputfile = _dependencies_graph(filename, dep_info) sect.append(Paragraph((f"{gtype}imports graph has been written to {outputfile}",))) @@ -964,15 +964,15 @@ def _filter_dependencies_graph(self, internal): @astroid.decorators.cached def _external_dependencies_info(self): - """Return cached external dependencies information or build and - cache them + """Return cached external dependencies information or build and cache + them. """ return self._filter_dependencies_graph(internal=False) @astroid.decorators.cached def _internal_dependencies_info(self): - """Return cached internal dependencies information or build and - cache them + """Return cached internal dependencies information or build and cache + them. """ return self._filter_dependencies_graph(internal=True) diff --git a/pylint/checkers/logging.py b/pylint/checkers/logging.py index aedaf62415..a3e7b5e7e4 100644 --- a/pylint/checkers/logging.py +++ b/pylint/checkers/logging.py @@ -276,7 +276,9 @@ def _check_log_method(self, node, name): ) def _helper_string(self, node): - """Create a string that lists the valid types of formatting for this node.""" + """Create a string that lists the valid types of formatting for this + node. + """ valid_types = ["lazy %"] if not self.linter.is_message_enabled( @@ -371,7 +373,9 @@ def _check_format_string(self, node, format_arg): def is_complex_format_str(node: nodes.NodeNG) -> bool: - """Return whether the node represents a string with complex formatting specs.""" + """Return whether the node represents a string with complex formatting + specs. + """ inferred = utils.safe_infer(node) if inferred is None or not ( isinstance(inferred, nodes.Const) and isinstance(inferred.value, str) diff --git a/pylint/checkers/misc.py b/pylint/checkers/misc.py index 69149e61a9..c0b4b3d71f 100644 --- a/pylint/checkers/misc.py +++ b/pylint/checkers/misc.py @@ -44,7 +44,9 @@ class ByIdManagedMessagesChecker(BaseChecker): - """Checks for messages that are enabled or disabled by id instead of symbol.""" + """Checks for messages that are enabled or disabled by id instead of + symbol. + """ __implements__ = IRawChecker name = "miscellaneous" @@ -64,7 +66,9 @@ def _get_by_id_managed_msgs(self) -> List[ManagedMessage]: return self.linter._by_id_managed_msgs def process_module(self, node: nodes.Module) -> None: - """Inspect the source file to find messages activated or deactivated by id.""" + """Inspect the source file to find messages activated or deactivated by + id. 
+ """ managed_msgs = self._get_by_id_managed_msgs() for (mod_name, msgid, symbol, lineno, is_disabled) in managed_msgs: if mod_name == node.name: diff --git a/pylint/checkers/modified_iterating_checker.py b/pylint/checkers/modified_iterating_checker.py index 711d37bb0e..1a38bb4a0a 100644 --- a/pylint/checkers/modified_iterating_checker.py +++ b/pylint/checkers/modified_iterating_checker.py @@ -62,7 +62,9 @@ def visit_for(self, node: nodes.For) -> None: def _modified_iterating_check_on_node_and_children( self, body_node: nodes.NodeNG, iter_obj: nodes.NodeNG ) -> None: - """See if node or any of its children raises modified iterating messages.""" + """See if node or any of its children raises modified iterating + messages. + """ self._modified_iterating_check(body_node, iter_obj) for child in body_node.get_children(): self._modified_iterating_check_on_node_and_children(child, iter_obj) diff --git a/pylint/checkers/newstyle.py b/pylint/checkers/newstyle.py index 00fa0748e3..eb8473d09f 100644 --- a/pylint/checkers/newstyle.py +++ b/pylint/checkers/newstyle.py @@ -45,7 +45,8 @@ class NewStyleConflictChecker(BaseChecker): """Checks for usage of new style capabilities on old style classes and - other new/old styles conflicts problems + other new/old styles conflicts problems. + * use of property, __slots__, super * "super" usage """ diff --git a/pylint/checkers/non_ascii_names.py b/pylint/checkers/non_ascii_names.py index d9b3bfd505..e71ad6d758 100644 --- a/pylint/checkers/non_ascii_names.py +++ b/pylint/checkers/non_ascii_names.py @@ -2,8 +2,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""All alphanumeric unicode character are allowed in Python but due -to similarities in how they look they can be confused. +"""All alphanumeric unicode character are allowed in Python but due to +similarities in how they look they can be confused. See: https://www.python.org/dev/peps/pep-0672/#confusable-characters-in-identifiers diff --git a/pylint/checkers/raw_metrics.py b/pylint/checkers/raw_metrics.py index 84e6c23c47..358a902fc9 100644 --- a/pylint/checkers/raw_metrics.py +++ b/pylint/checkers/raw_metrics.py @@ -60,7 +60,8 @@ def report_raw_stats( class RawMetricsChecker(BaseTokenChecker): - """Does not check anything but gives some raw metrics : + """Does not check anything but gives some raw metrics :. + * total number of lines * total number of code lines * total number of docstring lines diff --git a/pylint/checkers/refactoring/implicit_booleaness_checker.py b/pylint/checkers/refactoring/implicit_booleaness_checker.py index 7d9bcb674a..0aa3c71584 100644 --- a/pylint/checkers/refactoring/implicit_booleaness_checker.py +++ b/pylint/checkers/refactoring/implicit_booleaness_checker.py @@ -127,8 +127,9 @@ def instance_has_bool(class_def: nodes.ClassDef) -> bool: @utils.check_messages("use-implicit-booleaness-not-len") def visit_unaryop(self, node: nodes.UnaryOp) -> None: - """`not len(S)` must become `not S` regardless if the parent block is a test - condition or something else (boolean expression) e.g. `if not len(S):` + """`not len(S)` must become `not S` regardless if the parent block is a + test condition or something else (boolean expression) e.g. 
`if not + len(S):` """ if ( isinstance(node, nodes.UnaryOp) @@ -144,7 +145,9 @@ def visit_compare(self, node: nodes.Compare) -> None: def _check_use_implicit_booleaness_not_comparison( self, node: nodes.Compare ) -> None: - """Check for left side and right side of the node for empty literals.""" + """Check for left side and right side of the node for empty + literals. + """ is_left_empty_literal = utils.is_base_container( node.left ) or utils.is_empty_dict_literal(node.left) @@ -211,7 +214,8 @@ def _check_use_implicit_booleaness_not_comparison( def base_names_of_instance( node: Union[bases.Uninferable, bases.Instance] ) -> List[str]: - """Return all names inherited by a class instance or those returned by a function. + """Return all names inherited by a class instance or those returned by + a function. The inherited names include 'object'. """ diff --git a/pylint/checkers/refactoring/recommendation_checker.py b/pylint/checkers/refactoring/recommendation_checker.py index e4d6bc225d..3f250b8e7c 100644 --- a/pylint/checkers/refactoring/recommendation_checker.py +++ b/pylint/checkers/refactoring/recommendation_checker.py @@ -102,8 +102,8 @@ def _check_consider_iterating_dictionary(self, node: nodes.Call) -> None: self.add_message("consider-iterating-dictionary", node=node) def _check_use_maxsplit_arg(self, node: nodes.Call) -> None: - """Add message when accessing first or last elements of a str.split() or - str.rsplit(). + """Add message when accessing first or last elements of a str.split() + or str.rsplit(). """ # Check if call is split() or rsplit() @@ -338,8 +338,8 @@ def visit_const(self, node: nodes.Const) -> None: self._detect_replacable_format_call(node) def _detect_replacable_format_call(self, node: nodes.Const) -> None: - """Check whether a string is used in a call to format() or '%' and whether it - can be replaced by an f-string. + """Check whether a string is used in a call to format() or '%' and + whether it can be replaced by an f-string. """ if ( isinstance(node.parent, nodes.Attribute) diff --git a/pylint/checkers/refactoring/refactoring_checker.py b/pylint/checkers/refactoring/refactoring_checker.py index 198060f9de..1f5cd72af0 100644 --- a/pylint/checkers/refactoring/refactoring_checker.py +++ b/pylint/checkers/refactoring/refactoring_checker.py @@ -118,8 +118,8 @@ def _is_a_return_statement(node: nodes.Call) -> bool: def _is_part_of_with_items(node: nodes.Call) -> bool: - """Checks if one of the node's parents is a ``nodes.With`` node and that the node - itself is located somewhere under its ``items``. + """Checks if one of the node's parents is a ``nodes.With`` node and that + the node itself is located somewhere under its ``items``. """ frame = node.frame(future=True) current = node @@ -133,8 +133,9 @@ def _is_part_of_with_items(node: nodes.Call) -> bool: def _will_be_released_automatically(node: nodes.Call) -> bool: - """Checks if a call that could be used in a ``with`` statement is used in an - alternative construct which would ensure that its __exit__ method is called. + """Checks if a call that could be used in a ``with`` statement is used in + an alternative construct which would ensure that its __exit__ method is + called. """ callables_taking_care_of_exit = frozenset( ( @@ -151,8 +152,8 @@ def _will_be_released_automatically(node: nodes.Call) -> bool: class ConsiderUsingWithStack(NamedTuple): - """Stack for objects that may potentially trigger a R1732 message if they are not - used in a ``with`` block later on. 
+ """Stack for objects that may potentially trigger a R1732 message if they + are not used in a ``with`` block later on. """ module_scope: Dict[str, nodes.NodeNG] = {} @@ -181,9 +182,9 @@ def clear_all(self) -> None: class RefactoringChecker(checkers.BaseTokenChecker): """Looks for code which can be refactored. - This checker also mixes the astroid and the token approaches in order to create - knowledge about whether an "else if" node is a true "else if" node, or an "elif" - node. + This checker also mixes the astroid and the token approaches in + order to create knowledge about whether an "else if" node is a true + "else if" node, or an "elif" node. """ __implements__ = (interfaces.ITokenChecker, interfaces.IAstroidChecker) @@ -488,9 +489,9 @@ def _is_bool_const(node): def _is_actual_elif(self, node): """Check if the given node is an actual elif. - This is a problem we're having with the builtin ast module, which splits `elif` - branches into a separate if statement. Unfortunately we need to know the exact - type in certain cases. + This is a problem we're having with the builtin ast module, + which splits `elif` branches into a separate if statement. + Unfortunately we need to know the exact type in certain cases. """ if isinstance(node.parent, nodes.If): orelse = node.parent.orelse @@ -503,10 +504,11 @@ def _is_actual_elif(self, node): def _check_simplifiable_if(self, node): """Check if the given if node can be simplified. - The if statement can be reduced to a boolean expression in some cases. For - instance, if there are two branches and both of them return a boolean value that - depends on the result of the statement's test, then this can be reduced to - `bool(test)` without losing any functionality. + The if statement can be reduced to a boolean expression in some + cases. For instance, if there are two branches and both of them + return a boolean value that depends on the result of the + statement's test, then this can be reduced to `bool(test)` + without losing any functionality. """ if self._is_actual_elif(node): @@ -766,7 +768,9 @@ def visit_if(self, node: nodes.If) -> None: self._check_consider_using_min_max_builtin(node) def _check_consider_using_min_max_builtin(self, node: nodes.If): - """Check if the given if node can be refactored as a min/max python builtin.""" + """Check if the given if node can be refactored as a min/max python + builtin. + """ if self._is_actual_elif(node) or node.orelse: # Not interested in if statements with multiple branches. return @@ -908,7 +912,9 @@ def visit_raise(self, node: nodes.Raise) -> None: self._check_stop_iteration_inside_generator(node) def _check_stop_iteration_inside_generator(self, node): - """Check if an exception of type StopIteration is raised inside a generator.""" + """Check if an exception of type StopIteration is raised inside a + generator. + """ frame = node.frame(future=True) if not isinstance(frame, nodes.FunctionDef) or not frame.is_generator(): return @@ -924,7 +930,9 @@ def _check_stop_iteration_inside_generator(self, node): @staticmethod def _check_exception_inherit_from_stopiteration(exc): - """Return True if the exception node in argument inherit from StopIteration.""" + """Return True if the exception node in argument inherit from + StopIteration. 
+ """ stopiteration_qname = f"{utils.EXCEPTIONS_MODULE}.StopIteration" return any(_class.qname() == stopiteration_qname for _class in exc.mro()) @@ -1048,7 +1056,8 @@ def _check_super_with_arguments(self, node): self.add_message("super-with-arguments", node=node) def _check_raising_stopiteration_in_generator_next_call(self, node): - """Check if a StopIteration exception is raised by the call to next function. + """Check if a StopIteration exception is raised by the call to next + function. If the next value has a default value, then do not add message. @@ -1299,8 +1308,8 @@ def _apply_boolean_simplification_rules(operator, values): def _simplify_boolean_operation(self, bool_op): """Attempts to simplify a boolean operation. - Recursively applies simplification on the operator terms, and keeps track of - whether reductions have been made. + Recursively applies simplification on the operator terms, and + keeps track of whether reductions have been made. """ children = list(bool_op.get_children()) intermediate = [ @@ -1321,8 +1330,8 @@ def _simplify_boolean_operation(self, bool_op): def _check_simplifiable_condition(self, node): """Check if a boolean condition can be simplified. - Variables will not be simplified, even in the value can be inferred, and - expressions like '3 + 4' will remain expanded. + Variables will not be simplified, even in the value can be + inferred, and expressions like '3 + 4' will remain expanded. """ if not utils.is_test_condition(node): return @@ -1635,10 +1644,11 @@ def _check_unnecessary_comprehension(self, node: nodes.Comprehension) -> None: @staticmethod def _is_and_or_ternary(node): - """Returns true if node is 'condition and true_value or false_value' form. + """Returns true if node is 'condition and true_value or false_value' + form. - All of: condition, true_value and false_value should not be a complex boolean - expression + All of: condition, true_value and false_value should not be a + complex boolean expression """ return ( isinstance(node, nodes.BoolOp) @@ -1796,7 +1806,9 @@ def _is_node_return_ended(self, node: nodes.NodeNG) -> bool: @staticmethod def _has_return_in_siblings(node: nodes.NodeNG) -> bool: - """Returns True if there is at least one return in the node's siblings.""" + """Returns True if there is at least one return in the node's + siblings. + """ next_sibling = node.next_sibling() while next_sibling: if isinstance(next_sibling, nodes.Return): @@ -1826,9 +1838,9 @@ def _is_function_def_never_returning(self, node: nodes.FunctionDef) -> bool: return False def _check_return_at_the_end(self, node): - """Check for presence of a *single* return statement at the end of a function. - "return" or "return None" are useless because None is the default return type if - they are missing. + """Check for presence of a *single* return statement at the end of a + function. "return" or "return None" are useless because None is the + default return type if they are missing. NOTE: produces a message only if there is a single return statement in the function body. Otherwise _check_consistent_returns() is called! diff --git a/pylint/checkers/similar.py b/pylint/checkers/similar.py index 113b086bc7..1e682d23d9 100644 --- a/pylint/checkers/similar.py +++ b/pylint/checkers/similar.py @@ -29,17 +29,23 @@ """A similarities / code duplication command line tool and pylint checker. -The algorithm is based on comparing the hash value of n successive lines of a file. 
-First the files are read and any line that doesn't fulfill requirement are removed (comments, docstrings...) -Those stripped lines are stored in the LineSet class which gives access to them. -Then each index of the stripped lines collection is associated with the hash of n successive entries of the stripped lines starting at the current index -(n is the minimum common lines option). -The common hashes between both linesets are then looked for. If there are matches, then the match indices in both linesets are stored and associated -with the corresponding couples (start line number/end line number) in both files. -This association is then postprocessed to handle the case of successive matches. For example if the minimum common lines setting is set to four, then -the hashes are computed with four lines. If one of match indices couple (12, 34) is the successor of another one (11, 33) then it means that there are -in fact five lines which are common. -Once postprocessed the values of association table are the result looked for, i.e start and end lines numbers of common lines in both files. +The algorithm is based on comparing the hash value of n successive lines +of a file. First the files are read and any line that doesn't fulfill +requirement are removed (comments, docstrings...) Those stripped lines +are stored in the LineSet class which gives access to them. Then each +index of the stripped lines collection is associated with the hash of n +successive entries of the stripped lines starting at the current index +(n is the minimum common lines option). The common hashes between both +linesets are then looked for. If there are matches, then the match +indices in both linesets are stored and associated with the +corresponding couples (start line number/end line number) in both files. +This association is then postprocessed to handle the case of successive +matches. For example if the minimum common lines setting is set to four, +then the hashes are computed with four lines. If one of match indices +couple (12, 34) is the successor of another one (11, 33) then it means +that there are in fact five lines which are common. Once postprocessed +the values of association table are the result looked for, i.e start and +end lines numbers of common lines in both files. """ import copy import functools @@ -109,9 +115,9 @@ class LineSpecifs(NamedTuple): class CplSuccessiveLinesLimits: - """This class holds a couple of SuccessiveLinesLimits objects, one for each file compared, - and a counter on the number of common lines between both stripped lines collections extracted - from both files + """This class holds a couple of SuccessiveLinesLimits objects, one for each + file compared, and a counter on the number of common lines between both + stripped lines collections extracted from both files. """ __slots__ = ("first_file", "second_file", "effective_cmn_lines_nb") @@ -133,7 +139,9 @@ def __init__( class LinesChunk: - """The LinesChunk object computes and stores the hash of some consecutive stripped lines of a lineset.""" + """The LinesChunk object computes and stores the hash of some consecutive + stripped lines of a lineset. + """ __slots__ = ("_fileid", "_index", "_hash") @@ -230,10 +238,10 @@ def increment(self, value: Index) -> "LineSetStartCouple": def hash_lineset( lineset: "LineSet", min_common_lines: int = DEFAULT_MIN_SIMILARITY_LINE ) -> Tuple[HashToIndex_T, IndexToLines_T]: - """Return two dicts. 
The first associates the hash of successive stripped lines of a lineset - to the indices of the starting lines. - The second dict, associates the index of the starting line in the lineset's stripped lines to the - couple [start, end] lines number in the corresponding file. + """Return two dicts. The first associates the hash of successive stripped + lines of a lineset to the indices of the starting lines. The second dict, + associates the index of the starting line in the lineset's stripped lines + to the couple [start, end] lines number in the corresponding file. :param lineset: lineset object (i.e the lines in a file) :param min_common_lines: number of successive lines that are used to compute the hash @@ -318,9 +326,10 @@ def filter_noncode_lines( stindex_2: Index, common_lines_nb: int, ) -> int: - """Return the effective number of common lines between lineset1 and lineset2 filtered from non code lines, that is to say the number of - common successive stripped lines except those that do not contain code (for example a ligne with only an - ending parathensis) + """Return the effective number of common lines between lineset1 and + lineset2 filtered from non code lines, that is to say the number of common + successive stripped lines except those that do not contain code (for + example a ligne with only an ending parathensis) :param ls_1: first lineset :param stindex_1: first lineset starting index @@ -471,12 +480,15 @@ def _find_common( ) -> Generator[Commonality, None, None]: """Find similarities in the two given linesets. - This the core of the algorithm. - The idea is to compute the hashes of a minimal number of successive lines of each lineset and then compare the hashes. - Every match of such comparison is stored in a dict that links the couple of starting indices in both linesets to - the couple of corresponding starting and ending lines in both files. - Last regroups all successive couples in a bigger one. It allows to take into account common chunk of lines that have more - than the minimal number of successive lines required. + This the core of the algorithm. The idea is to compute the + hashes of a minimal number of successive lines of each lineset + and then compare the hashes. Every match of such comparison is + stored in a dict that links the couple of starting indices in + both linesets to the couple of corresponding starting and ending + lines in both files. Last regroups all successive couples in a + bigger one. It allows to take into account common chunk of lines + that have more than the minimal number of successive lines + required. """ hash_to_index_1: HashToIndex_T hash_to_index_2: HashToIndex_T @@ -536,7 +548,7 @@ def _find_common( def _iter_sims(self) -> Generator[Commonality, None, None]: """Iterate on similarities among all files, by making a cartesian - product + product. """ for idx, lineset in enumerate(self.linesets[:-1]): for lineset2 in self.linesets[idx + 1 :]: @@ -545,8 +557,8 @@ def _iter_sims(self) -> Generator[Commonality, None, None]: def get_map_data(self): """Returns the data we can use for a map/reduce process. - In this case we are returning this instance's Linesets, that is all file - information that will later be used for vectorisation. + In this case we are returning this instance's Linesets, that is + all file information that will later be used for vectorisation. 
""" return self.linesets @@ -565,7 +577,8 @@ def stripped_lines( ignore_imports: bool, ignore_signatures: bool, ) -> List[LineSpecifs]: - """Return tuples of line/line number/line type with leading/trailing whitespace and any ignored code features removed. + """Return tuples of line/line number/line type with leading/trailing + whitespace and any ignored code features removed. :param lines: a collection of lines :param ignore_comments: if true, any comment in the lines collection is removed from the result @@ -593,7 +606,9 @@ def stripped_lines( def _get_functions( functions: List[nodes.NodeNG], tree: nodes.NodeNG ) -> List[nodes.NodeNG]: - """Recursively get all functions including nested in the classes from the tree.""" + """Recursively get all functions including nested in the classes + from the tree. + """ for node in tree.body: if isinstance(node, (nodes.FunctionDef, nodes.AsyncFunctionDef)): @@ -656,8 +671,10 @@ def _get_functions( @functools.total_ordering class LineSet: """Holds and indexes all the lines of a single source file. - Allows for correspondence between real lines of the source file and stripped ones, which - are the real ones from which undesired patterns have been removed. + + Allows for correspondence between real lines of the source file and + stripped ones, which are the real ones from which undesired patterns + have been removed. """ def __init__( @@ -728,9 +745,10 @@ def report_similarities( # wrapper to get a pylint checker from the similar class class SimilarChecker(BaseChecker, Similar, MapReduceMixin): - """Checks for similarities and duplicated code. This computation may be - memory / CPU intensive, so you should disable it if you experiment some - problems. + """Checks for similarities and duplicated code. + + This computation may be memory / CPU intensive, so you should + disable it if you experiment some problems. """ __implements__ = (IRawChecker,) @@ -842,7 +860,9 @@ def process_module(self, node: nodes.Module) -> None: self.append_stream(self.linter.current_name, stream, node.file_encoding) # type: ignore[arg-type] def close(self): - """Compute and display similarities on closing (i.e. end of parsing).""" + """Compute and display similarities on closing (i.e. end of + parsing). + """ total = sum(len(lineset) for lineset in self.linesets) duplicated = 0 stats = self.linter.stats diff --git a/pylint/checkers/spelling.py b/pylint/checkers/spelling.py index 8c004444e6..3aa56c9a5e 100644 --- a/pylint/checkers/spelling.py +++ b/pylint/checkers/spelling.py @@ -111,9 +111,8 @@ def _skip(self, word): class RegExFilter(Filter): """Parent class for filters using regular expressions. - This filter skips any words the match the expression - assigned to the class attribute ``_pattern``. - + This filter skips any words the match the expression assigned to the + class attribute ``_pattern``. """ _pattern: Pattern[str] @@ -146,7 +145,9 @@ class SphinxDirectives(RegExFilter): class ForwardSlashChunker(Chunker): - """This chunker allows splitting words like 'before/after' into 'before' and 'after'.""" + """This chunker allows splitting words like 'before/after' into 'before' + and 'after'. + """ def next(self): while True: @@ -190,8 +191,8 @@ def _next(self): def _strip_code_flanked_in_backticks(line: str) -> str: """Alter line so code flanked in backticks is ignored. - Pyenchant automatically strips backticks when parsing tokens, - so this cannot be done at the individual filter level. 
+ Pyenchant automatically strips backticks when parsing tokens, so + this cannot be done at the individual filter level. """ def replace_code_but_leave_surrounding_characters(match_obj) -> str: diff --git a/pylint/checkers/strings.py b/pylint/checkers/strings.py index fd44289e04..1463a9c197 100644 --- a/pylint/checkers/strings.py +++ b/pylint/checkers/strings.py @@ -256,8 +256,8 @@ def arg_matches_format_type(arg_type, format_type): class StringFormatChecker(BaseChecker): - """Checks string formatting operations to ensure that the format string is valid and - the arguments match the format string. + """Checks string formatting operations to ensure that the format string is + valid and the arguments match the format string. """ __implements__ = (IAstroidChecker,) @@ -932,7 +932,8 @@ def register(linter: "PyLinter") -> None: def str_eval(token): - """Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit. + """Mostly replicate `ast.literal_eval(token)` manually to avoid any + performance hit. This supports f-strings, contrary to `ast.literal_eval`. We have to support all string literal notations: diff --git a/pylint/checkers/typecheck.py b/pylint/checkers/typecheck.py index 7b2cab10d5..fb731d6555 100644 --- a/pylint/checkers/typecheck.py +++ b/pylint/checkers/typecheck.py @@ -235,8 +235,8 @@ def _string_distance(seq1, seq2): def _similar_names(owner, attrname, distance_threshold, max_choices): """Given an owner and a name, try to find similar names. - The similar names are searched given a distance metric and only - a given number of choices will be returned. + The similar names are searched given a distance metric and only a + given number of choices will be returned. """ possible_names = [] names = _node_names(owner) @@ -667,12 +667,11 @@ def _no_context_variadic(node, variadic_name, variadic_type, variadics): """Verify if the given call node has variadic nodes without context. This is a workaround for handling cases of nested call functions - which don't have the specific call context at hand. - Variadic arguments (variable positional arguments and variable - keyword arguments) are inferred, inherently wrong, by astroid - as a Tuple, respectively a Dict with empty elements. - This can lead pylint to believe that a function call receives - too few arguments. + which don't have the specific call context at hand. Variadic + arguments (variable positional arguments and variable keyword + arguments) are inferred, inherently wrong, by astroid as a Tuple, + respectively a Dict with empty elements. This can lead pylint to + believe that a function call receives too few arguments. """ scope = node.scope() is_in_lambda_scope = not isinstance(scope, nodes.FunctionDef) and isinstance( @@ -1142,8 +1141,8 @@ def visit_assign(self, node: nodes.Assign) -> None: self._check_dundername_is_string(node) def _check_assignment_from_function_call(self, node: nodes.Assign) -> None: - """Check that if assigning to a function call, the function is - possibly returning something valuable + """Check that if assigning to a function call, the function is possibly + returning something valuable. """ if not isinstance(node.value, nodes.Call): return @@ -1233,8 +1232,8 @@ def _check_dundername_is_string(self, node) -> None: self.add_message("non-str-assignment-to-dunder-name", node=node) def _check_uninferable_call(self, node): - """Check that the given uninferable Call node does not - call an actual function. + """Check that the given uninferable Call node does not call an actual + function. 
""" if not isinstance(node.func, nodes.Attribute): return @@ -1284,8 +1283,9 @@ def _check_uninferable_call(self, node): def _check_argument_order(self, node, call_site, called, called_param_names): """Match the supplied argument names against the function parameters. - Warn if some argument names are not in the same order as they are in - the function signature. + + Warn if some argument names are not in the same order as they + are in the function signature. """ # Check for called function being an object instance function # If so, ignore the initial 'self' argument in the signature @@ -1333,9 +1333,9 @@ def _check_isinstance_args(self, node): # pylint: disable=too-many-branches,too-many-locals @check_messages(*(list(MSGS.keys()))) def visit_call(self, node: nodes.Call) -> None: - """Check that called functions/methods are inferred to callable objects, - and that the arguments passed to the function match the parameters in - the inferred function's definition + """Check that called functions/methods are inferred to callable + objects, and that the arguments passed to the function match the + parameters in the inferred function's definition. """ called = safe_infer(node.func) @@ -1533,8 +1533,8 @@ def visit_call(self, node: nodes.Call) -> None: def _keyword_argument_is_in_all_decorator_returns( func: nodes.FunctionDef, keyword: str ) -> bool: - """Check if the keyword argument exists in all signatures of the - return values of all decorators of the function. + """Check if the keyword argument exists in all signatures of the return + values of all decorators of the function. """ if not func.decorators: return False @@ -1648,8 +1648,9 @@ def _check_not_callable( ) -> None: """Checks to see if the not-callable message should be emitted. - Only functions, generators and objects defining __call__ are "callable" - We ignore instances of descriptors since astroid cannot properly handle them yet + Only functions, generators and objects defining __call__ are + "callable" We ignore instances of descriptors since astroid + cannot properly handle them yet """ # Handle uninferable calls if not inferred_call or inferred_call.callable(): @@ -1815,7 +1816,9 @@ def visit_binop(self, node: nodes.BinOp) -> None: self._detect_unsupported_alternative_union_syntax(node) def _detect_unsupported_alternative_union_syntax(self, node: nodes.BinOp) -> None: - """Detect if unsupported alternative Union syntax (PEP 604) was used.""" + """Detect if unsupported alternative Union syntax (PEP 604) was + used. + """ if self._py310_plus: # 310+ supports the new syntax return @@ -1995,8 +1998,8 @@ def visit_for(self, node: nodes.For) -> None: class IterableChecker(BaseChecker): - """Checks for non-iterables used in an iterable context. - Contexts include: + """Checks for non-iterables used in an iterable context. Contexts include:. + - for-statement - starargs in function call - `yield from`-statement diff --git a/pylint/checkers/unicode.py b/pylint/checkers/unicode.py index 93dfacb4b6..598774d952 100644 --- a/pylint/checkers/unicode.py +++ b/pylint/checkers/unicode.py @@ -2,8 +2,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Unicode and some other ASCII characters can be used to create programs that run -much different compared to what a human reader would expect from them. 
+"""Unicode and some other ASCII characters can be used to create programs that +run much different compared to what a human reader would expect from them. PEP 672 lists some examples. See: https://www.python.org/dev/peps/pep-0672/ @@ -158,8 +158,8 @@ def _map_positions_to_result( """Get all occurrences of search dict keys within line. Ignores Windows end of line and can handle bytes as well as string. - Also takes care of encodings for which the length of an encoded code point does not - default to 8 Bit. + Also takes care of encodings for which the length of an encoded code + point does not default to 8 Bit. """ result: Dict[int, _BadChar] = {} diff --git a/pylint/checkers/unsupported_version.py b/pylint/checkers/unsupported_version.py index ec1234091f..f5d64dcd7f 100644 --- a/pylint/checkers/unsupported_version.py +++ b/pylint/checkers/unsupported_version.py @@ -5,8 +5,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Checker for features used that are not supported by all python versions indicated by -the py-version setting. +"""Checker for features used that are not supported by all python versions +indicated by the py-version setting. """ @@ -28,8 +28,8 @@ class UnsupportedVersionChecker(BaseChecker): - """Checker for features that are not supported by all python versions indicated by - the py-version setting. + """Checker for features that are not supported by all python versions + indicated by the py-version setting. """ __implements__ = (IAstroidChecker,) @@ -67,8 +67,8 @@ def visit_decorators(self, node: nodes.Decorators) -> None: self._check_typing_final(node) def _check_typing_final(self, node: nodes.Decorators) -> None: - """Add a message when the `typing.final` decorator is used and the py- version - is lower than 3.8. + """Add a message when the `typing.final` decorator is used and the py- + version is lower than 3.8. """ if self._py38_plus: return diff --git a/pylint/checkers/utils.py b/pylint/checkers/utils.py index 026a78b5e7..4237a81be8 100644 --- a/pylint/checkers/utils.py +++ b/pylint/checkers/utils.py @@ -330,12 +330,16 @@ def is_error(node: nodes.FunctionDef) -> bool: def is_builtin_object(node: nodes.NodeNG) -> bool: - """Returns True if the given node is an object from the __builtin__ module.""" + """Returns True if the given node is an object from the __builtin__ + module. + """ return node and node.root().name == "builtins" def is_builtin(name: str) -> bool: - """Return true if could be considered as a builtin defined by python.""" + """Return true if could be considered as a builtin defined by + python. + """ return name in builtins or name in SPECIAL_BUILTINS # type: ignore[attr-defined] @@ -390,10 +394,10 @@ def is_defined_in_scope( def is_defined_before(var_node: nodes.Name) -> bool: """Check if the given variable node is defined before. - Verify that the variable node is defined by a parent node - (list, set, dict, or generator comprehension, lambda) - or in a previous sibling node on the same line - (statement_defining ; statement_using). + Verify that the variable node is defined by a parent node (list, + set, dict, or generator comprehension, lambda) or in a previous + sibling node on the same line (statement_defining ; + statement_using). 
""" varname = var_node.name for parent in var_node.node_ancestors(): @@ -417,8 +421,8 @@ def is_defined_before(var_node: nodes.Name) -> bool: def is_default_argument( node: nodes.NodeNG, scope: Optional[nodes.NodeNG] = None ) -> bool: - """Return true if the given Name node is used in function or lambda - default argument's value + """Return true if the given Name node is used in function or lambda default + argument's value. """ if not scope: scope = node.scope() @@ -454,7 +458,7 @@ def is_func_decorator(node: nodes.NodeNG) -> bool: def is_ancestor_name(frame: nodes.ClassDef, node: nodes.NodeNG) -> bool: """Return whether `frame` is an astroid.Class node with `node` in the - subtree of its bases attribute + subtree of its bases attribute. """ if not isinstance(frame, nodes.ClassDef): return False @@ -467,15 +471,17 @@ def is_being_called(node: nodes.NodeNG) -> bool: def assign_parent(node: nodes.NodeNG) -> nodes.NodeNG: - """Return the higher parent which is not an AssignName, Tuple or List node.""" + """Return the higher parent which is not an AssignName, Tuple or List + node. + """ while node and isinstance(node, (nodes.AssignName, nodes.Tuple, nodes.List)): node = node.parent return node def overrides_a_method(class_node: nodes.ClassDef, name: str) -> bool: - """Return True if is a method overridden from an ancestor - which is not the base object class + """Return True if is a method overridden from an ancestor which is + not the base object class. """ for ancestor in class_node.ancestors(): if ancestor.name == "object": @@ -500,8 +506,8 @@ class IncompleteFormatString(Exception): class UnsupportedFormatCharacter(Exception): - """A format character in a format string is not one of the supported - format characters. + """A format character in a format string is not one of the supported format + characters. """ def __init__(self, index): @@ -512,10 +518,12 @@ def __init__(self, index): def parse_format_string( format_string: str, ) -> Tuple[Set[str], int, Dict[str, str], List[str]]: - """Parses a format string, returning a tuple of (keys, num_args), where 'keys' - is the set of mapping keys in the format string, and 'num_args' is the number - of arguments required by the format string. Raises IncompleteFormatString or - UnsupportedFormatCharacter if a parse error occurs. + """Parses a format string, returning a tuple of (keys, num_args), where + 'keys' is the set of mapping keys in the format string, and 'num_args' is + the number of arguments required by the format string. + + Raises IncompleteFormatString or UnsupportedFormatCharacter if a + parse error occurs. """ keys = set() key_types = {} @@ -592,9 +600,10 @@ def split_format_field_names(format_string) -> Tuple[str, Iterable[Tuple[bool, s def collect_string_fields(format_string) -> Iterable[Optional[str]]: - """Given a format string, return an iterator - of all the valid format fields. It handles nested fields - as well. + """Given a format string, return an iterator of all the valid format + fields. + + It handles nested fields as well. 
""" formatter = string.Formatter() try: @@ -627,10 +636,11 @@ def parse_format_method_string( format_string: str, ) -> Tuple[List[Tuple[str, List[Tuple[bool, str]]]], int, int]: """Parses a PEP 3101 format string, returning a tuple of - (keyword_arguments, implicit_pos_args_cnt, explicit_pos_args), - where keyword_arguments is the set of mapping keys in the format string, implicit_pos_args_cnt - is the number of arguments required by the format string and - explicit_pos_args is the number of arguments passed with the position. + (keyword_arguments, implicit_pos_args_cnt, explicit_pos_args), where + keyword_arguments is the set of mapping keys in the format string, + implicit_pos_args_cnt is the number of arguments required by the format + string and explicit_pos_args is the number of arguments passed with the + position. """ keyword_arguments = [] implicit_pos_args_cnt = 0 @@ -665,8 +675,8 @@ def is_attr_protected(attrname: str) -> bool: def node_frame_class(node: nodes.NodeNG) -> Optional[nodes.ClassDef]: """Return the class that is wrapping the given node. - The function returns a class for a method node (or a staticmethod or a - classmethod), otherwise it returns `None`. + The function returns a class for a method node (or a staticmethod or + a classmethod), otherwise it returns `None`. """ klass = node.frame(future=True) nodes_to_check = ( @@ -688,7 +698,9 @@ def node_frame_class(node: nodes.NodeNG) -> Optional[nodes.ClassDef]: def get_outer_class(class_node: astroid.ClassDef) -> Optional[astroid.ClassDef]: - """Return the class that is the outer class of given (nested) class_node.""" + """Return the class that is the outer class of given (nested) + class_node. + """ parent_klass = class_node.parent.frame(future=True) return parent_klass if isinstance(parent_klass, astroid.ClassDef) else None @@ -745,14 +757,13 @@ def inherit_from_std_ex(node: nodes.NodeNG) -> bool: def error_of_type(handler: nodes.ExceptHandler, error_type) -> bool: - """Check if the given exception handler catches - the given error_type. - - The *handler* parameter is a node, representing an ExceptHandler node. - The *error_type* can be an exception, such as AttributeError, - the name of an exception, or it can be a tuple of errors. - The function will return True if the handler catches any of the - given errors. + """Check if the given exception handler catches the given error_type. + + The *handler* parameter is a node, representing an ExceptHandler + node. The *error_type* can be an exception, such as AttributeError, + the name of an exception, or it can be a tuple of errors. The + function will return True if the handler catches any of the given + errors. """ def stringify_error(error): @@ -839,7 +850,9 @@ def decorated_with( ], qnames: Iterable[str], ) -> bool: - """Determine if the `func` node has a decorator with the qualified name `qname`.""" + """Determine if the `func` node has a decorator with the qualified name + `qname`. + """ decorators = func.decorators.nodes if func.decorators else [] for decorator_node in decorators: if isinstance(decorator_node, nodes.Call): @@ -862,9 +875,9 @@ def uninferable_final_decorators( ) -> List[Optional[Union[nodes.Attribute, nodes.Name]]]: """Return a list of uninferable `typing.final` decorators in `node`. - This function is used to determine if the `typing.final` decorator is used - with an unsupported Python version; the decorator cannot be inferred when - using a Python version lower than 3.8. 
+ This function is used to determine if the `typing.final` decorator + is used with an unsupported Python version; the decorator cannot be + inferred when using a Python version lower than 3.8. """ decorators = [] for decorator in getattr(node, "nodes", []): @@ -909,12 +922,11 @@ def unimplemented_abstract_methods( """Get the unimplemented abstract methods for the given *node*. A method can be considered abstract if the callback *is_abstract_cb* - returns a ``True`` value. The check defaults to verifying that - a method is decorated with abstract methods. - The function will work only for new-style classes. For old-style - classes, it will simply return an empty dictionary. - For the rest of them, it will return a dictionary of abstract method - names and their inferred objects. + returns a ``True`` value. The check defaults to verifying that a + method is decorated with abstract methods. The function will work + only for new-style classes. For old-style classes, it will simply + return an empty dictionary. For the rest of them, it will return a + dictionary of abstract method names and their inferred objects. """ if is_abstract_cb is None: is_abstract_cb = partial(decorated_with, qnames=ABC_METHODS) @@ -977,7 +989,9 @@ def find_try_except_wrapper_node( def find_except_wrapper_node_in_scope( node: nodes.NodeNG, ) -> Optional[Union[nodes.ExceptHandler, nodes.TryExcept]]: - """Return the ExceptHandler in which the node is, without going out of scope.""" + """Return the ExceptHandler in which the node is, without going out of + scope. + """ for current in node.node_ancestors(): if isinstance(current, astroid.scoped_nodes.LocalsDictNodeNG): # If we're inside a function/class definition, we don't want to keep checking @@ -1034,7 +1048,6 @@ def get_exception_handlers( Returns: list: the collection of handlers that are handling the exception or None. - """ context = find_try_except_wrapper_node(node) if isinstance(context, nodes.TryExcept): @@ -1045,8 +1058,8 @@ def get_exception_handlers( def is_node_inside_try_except(node: nodes.Raise) -> bool: - """Check if the node is directly under a Try/Except statement. - (but not under an ExceptHandler!) + """Check if the node is directly under a Try/Except statement. (but not + under an ExceptHandler!) Args: node (nodes.Raise): the node raising the exception. @@ -1061,8 +1074,8 @@ def is_node_inside_try_except(node: nodes.Raise) -> bool: def node_ignores_exception(node: nodes.NodeNG, exception=Exception) -> bool: """Check if the node is in a TryExcept which handles the given exception. - If the exception is not given, the function is going to look for bare - excepts. + If the exception is not given, the function is going to look for + bare excepts. """ managing_handlers = get_exception_handlers(node, exception) if not managing_handlers: @@ -1072,7 +1085,7 @@ def node_ignores_exception(node: nodes.NodeNG, exception=Exception) -> bool: def class_is_abstract(node: nodes.ClassDef) -> bool: """Return true if the given class node should be considered as an abstract - class + class. """ # Only check for explicit metaclass=ABCMeta on this specific class meta = node.declared_metaclass() @@ -1242,8 +1255,8 @@ def _get_python_type_of_node(node): def safe_infer(node: nodes.NodeNG, context=None) -> Optional[nodes.NodeNG]: """Return the inferred value for the given node. - Return None if inference failed or if there is some ambiguity (more than - one node has been inferred of different types). 
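As a usage illustration only (the snippet below is hypothetical and not part of the patch), ``safe_infer`` is typically handed a node extracted from an astroid tree:

.. code-block:: python

    import astroid

    from pylint.checkers.utils import safe_infer

    node = astroid.extract_node("len([1, 2, 3])  #@")
    inferred = safe_infer(node)
    # `inferred` is a single inferred node when astroid agrees on one value,
    # and None when inference fails or yields nodes of conflicting types.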
+ Return None if inference failed or if there is some ambiguity (more + than one node has been inferred of different types). """ inferred_types = set() try: @@ -1315,8 +1328,8 @@ def is_none(node: nodes.NodeNG) -> bool: def node_type(node: nodes.NodeNG) -> Optional[nodes.NodeNG]: """Return the inferred type for `node`. - If there is more than one possible type, or if inferred type is Uninferable or None, - return None + If there is more than one possible type, or if inferred type is + Uninferable or None, return None """ # check there is only one possible type for the assign node. Else we # don't handle it for now @@ -1366,9 +1379,11 @@ def is_registered_in_singledispatch_function(node: nodes.FunctionDef) -> bool: def get_node_last_lineno(node: nodes.NodeNG) -> int: - """Get the last lineno of the given node. For a simple statement this will just be node.lineno, - but for a node that has child statements (e.g. a method) this will be the lineno of the last - child statement recursively. + """Get the last lineno of the given node. + + For a simple statement this will just be node.lineno, but for a node + that has child statements (e.g. a method) this will be the lineno of + the last child statement recursively. """ # 'finalbody' is always the last clause in a try statement, if present if getattr(node, "finalbody", False): @@ -1396,8 +1411,8 @@ def is_postponed_evaluation_enabled(node: nodes.NodeNG) -> bool: def is_class_subscriptable_pep585_with_postponed_evaluation_enabled( value: nodes.ClassDef, node: nodes.NodeNG ) -> bool: - """Check if class is subscriptable with PEP 585 and - postponed evaluation enabled. + """Check if class is subscriptable with PEP 585 and postponed evaluation + enabled. """ return ( is_postponed_evaluation_enabled(node) @@ -1409,8 +1424,8 @@ def is_class_subscriptable_pep585_with_postponed_evaluation_enabled( def is_node_in_type_annotation_context(node: nodes.NodeNG) -> bool: """Check if node is in type annotation context. - Check for 'AnnAssign', function 'Arguments', - or part of function return type anntation. + Check for 'AnnAssign', function 'Arguments', or part of function + return type anntation. """ # pylint: disable=too-many-boolean-expressions current_node, parent_node = node, node.parent @@ -1438,6 +1453,7 @@ def is_node_in_type_annotation_context(node: nodes.NodeNG) -> bool: def is_subclass_of(child: nodes.ClassDef, parent: nodes.ClassDef) -> bool: """Check if first node is a subclass of second node. + :param child: Node to check for subclass. :param parent: Node to check for superclass. :returns: True if child is derived from parent. False otherwise. @@ -1511,8 +1527,8 @@ def is_classdef_type(node: nodes.ClassDef) -> bool: def is_attribute_typed_annotation( node: Union[nodes.ClassDef, astroid.Instance], attr_name: str ) -> bool: - """Test if attribute is typed annotation in current node - or any base nodes. + """Test if attribute is typed annotation in current node or any base + nodes. """ attribute = node.locals.get(attr_name, [None])[0] if ( @@ -1556,11 +1572,11 @@ def is_assign_name_annotated_with(node: nodes.AssignName, typing_name: str) -> b def get_iterating_dictionary_name( node: Union[nodes.For, nodes.Comprehension] ) -> Optional[str]: - """Get the name of the dictionary which keys are being iterated over on - a ``nodes.For`` or ``nodes.Comprehension`` node. + """Get the name of the dictionary which keys are being iterated over on a + ``nodes.For`` or ``nodes.Comprehension`` node. 
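The kind of code this helper is used to recognise looks like the following illustrative snippet (not part of the patch); the ``consider-iterating-dictionary`` recommendation builds on it:

.. code-block:: python

    squares = {1: 1, 2: 4, 3: 9}

    for key in squares.keys():  # flagged: the .keys() call is redundant here
        print(key)

    for key in squares:  # preferred: iterating a dict already yields its keys
        print(key)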
- If the iterating object is not either the keys method of a dictionary - or a dictionary itself, this returns None. + If the iterating object is not either the keys method of a + dictionary or a dictionary itself, this returns None. """ # Is it a proper keys call? if ( @@ -1658,6 +1674,7 @@ def is_typing_guard(node: nodes.If) -> bool: def is_node_in_guarded_import_block(node: nodes.NodeNG) -> bool: """Return True if node is part for guarded if block. + I.e. `sys.version_info` or `typing.TYPE_CHECKING` """ return isinstance(node.parent, nodes.If) and ( @@ -1666,7 +1683,9 @@ def is_node_in_guarded_import_block(node: nodes.NodeNG) -> bool: def is_reassigned_after_current(node: nodes.NodeNG, varname: str) -> bool: - """Check if the given variable name is reassigned in the same scope after the current node.""" + """Check if the given variable name is reassigned in the same scope after + the current node. + """ return any( a.name == varname and a.lineno > node.lineno for a in node.scope().nodes_of_class( @@ -1676,7 +1695,9 @@ def is_reassigned_after_current(node: nodes.NodeNG, varname: str) -> bool: def is_deleted_after_current(node: nodes.NodeNG, varname: str) -> bool: - """Check if the given variable name is deleted in the same scope after the current node.""" + """Check if the given variable name is deleted in the same scope after the + current node. + """ return any( getattr(target, "name", None) == varname and target.lineno > node.lineno for del_node in node.scope().nodes_of_class(nodes.Delete) @@ -1720,7 +1741,9 @@ def returns_bool(node: nodes.NodeNG) -> bool: def get_node_first_ancestor_of_type( node: nodes.NodeNG, ancestor_type: Union[Type[T_Node], Tuple[Type[T_Node], ...]] ) -> Optional[T_Node]: - """Return the first parent node that is any of the provided types (or None).""" + """Return the first parent node that is any of the provided types (or + None). + """ for ancestor in node.node_ancestors(): if isinstance(ancestor, ancestor_type): return ancestor @@ -1731,9 +1754,10 @@ def get_node_first_ancestor_of_type_and_its_child( node: nodes.NodeNG, ancestor_type: Union[Type[T_Node], Tuple[Type[T_Node], ...]] ) -> Union[Tuple[None, None], Tuple[T_Node, nodes.NodeNG]]: """Modified version of get_node_first_ancestor_of_type to also return the - descendant visited directly before reaching the sought ancestor. Useful - for extracting whether a statement is guarded by a try, except, or finally - when searching for a TryFinally ancestor. + descendant visited directly before reaching the sought ancestor. + + Useful for extracting whether a statement is guarded by a try, + except, or finally when searching for a TryFinally ancestor. """ child = node for ancestor in node.node_ancestors(): diff --git a/pylint/checkers/variables.py b/pylint/checkers/variables.py index 1781866aa2..5dbdcc6792 100644 --- a/pylint/checkers/variables.py +++ b/pylint/checkers/variables.py @@ -214,8 +214,8 @@ def overridden_method(klass, name): def _get_unpacking_extra_info(node, inferred): - """Return extra information to add to the message for unpacking-non-sequence - and unbalanced-tuple-unpacking errors + """Return extra information to add to the message for unpacking-non- + sequence and unbalanced-tuple-unpacking errors. """ more = "" inferred_module = inferred.root().name @@ -230,8 +230,7 @@ def _get_unpacking_extra_info(node, inferred): def _detect_global_scope(node, frame, defframe): - """Detect that the given frames shares a global - scope. + """Detect that the given frames shares a global scope. 
Two frames shares a global scope when neither of them are hidden under a function scope, as well @@ -244,7 +243,6 @@ class A: # B has the same global scope as `C`, leading to a NameError. class B(C): ... class C: ... - """ def_scope = scope = None if frame and frame.parent: @@ -301,10 +299,12 @@ def _infer_name_module(node, name): def _fix_dot_imports(not_consumed): - """Try to fix imports with multiple dots, by returning a dictionary - with the import names expanded. The function unflattens root imports, - like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree' - and 'xml.sax' respectively. + """Try to fix imports with multiple dots, by returning a dictionary with + the import names expanded. + + The function unflattens root imports, like 'xml' (when we have both + 'xml.etree' and 'xml.sax'), to 'xml.etree' and 'xml.sax' + respectively. """ names = {} for name, stmts in not_consumed.items(): @@ -343,7 +343,8 @@ def _fix_dot_imports(not_consumed): def _find_frame_imports(name, frame): - """Detect imports in the frame, with the required + """Detect imports in the frame, with the required. + *name*. Such imports can be considered assignments. Returns True if an import for the given name was found. """ @@ -556,7 +557,9 @@ class ScopeConsumer(NamedTuple): class NamesConsumer: - """A simple class to handle consumed, to consume and scope type info of node locals.""" + """A simple class to handle consumed, to consume and scope type info of + node locals. + """ def __init__(self, node, scope_type): self._atomic = ScopeConsumer( @@ -594,10 +597,12 @@ def consumed(self): @property def consumed_uncertain(self) -> DefaultDict[str, List[nodes.NodeNG]]: """Retrieves nodes filtered out by get_next_to_consume() that may not - have executed, such as statements in except blocks, or statements - in try blocks (when evaluating their corresponding except and finally - blocks). Checkers that want to treat the statements as executed - (e.g. for unused-variable) may need to add them back. + have executed, such as statements in except blocks, or statements in + try blocks (when evaluating their corresponding except and finally + blocks). + + Checkers that want to treat the statements as executed (e.g. for + unused-variable) may need to add them back. """ return self._atomic.consumed_uncertain @@ -607,8 +612,9 @@ def scope_type(self): def mark_as_consumed(self, name, consumed_nodes): """Mark the given nodes as consumed for the name. - If all of the nodes for the name were consumed, delete the name from - the to_consume dictionary + + If all of the nodes for the name were consumed, delete the name + from the to_consume dictionary """ unconsumed = [n for n in self.to_consume[name] if n not in set(consumed_nodes)] self.consumed[name] = consumed_nodes @@ -619,10 +625,12 @@ def mark_as_consumed(self, name, consumed_nodes): del self.to_consume[name] def get_next_to_consume(self, node: nodes.Name) -> Optional[List[nodes.NodeNG]]: - """Return a list of the nodes that define `node` from this scope. If it is - uncertain whether a node will be consumed, such as for statements in - except blocks, add it to self.consumed_uncertain instead of returning it. - Return None to indicate a special case that needs to be handled by the caller. + """Return a list of the nodes that define `node` from this scope. + + If it is uncertain whether a node will be consumed, such as for + statements in except blocks, add it to self.consumed_uncertain + instead of returning it. 
Return None to indicate a special case + that needs to be handled by the caller. """ name = node.name parent_node = node.parent @@ -701,8 +709,8 @@ def _uncertain_nodes_in_except_blocks( node: nodes.NodeNG, node_statement: nodes.Statement, ) -> List[nodes.NodeNG]: - """Return any nodes in ``found_nodes`` that should be treated as uncertain - because they are in an except block. + """Return any nodes in ``found_nodes`` that should be treated as + uncertain because they are in an except block. """ uncertain_nodes = [] for other_node in found_nodes: @@ -795,11 +803,11 @@ def _define_raise_or_return(stmt: nodes.NodeNG) -> bool: def _check_loop_finishes_via_except( node: nodes.NodeNG, other_node_try_except: nodes.TryExcept ) -> bool: - """Check for a case described in https://github.com/PyCQA/pylint/issues/5683. - It consists of a specific control flow scenario where the only - non-break exit from a loop consists of the very except handler we are - examining, such that code in the `else` branch of the loop can depend on it - being assigned. + """Check for a case described in + https://github.com/PyCQA/pylint/issues/5683. It consists of a specific + control flow scenario where the only non-break exit from a loop + consists of the very except handler we are examining, such that code in + the `else` branch of the loop can depend on it being assigned. Example: @@ -837,7 +845,9 @@ def _check_loop_finishes_via_except( def _try_in_loop_body( other_node_try_except: nodes.TryExcept, loop: Union[nodes.For, nodes.While] ) -> bool: - """Return True if `other_node_try_except` is a descendant of `loop`.""" + """Return True if `other_node_try_except` is a descendant of + `loop`. + """ return any( loop_body_statement is other_node_try_except or loop_body_statement.parent_of(other_node_try_except) @@ -867,8 +877,8 @@ def _try_in_loop_body( def _recursive_search_for_continue_before_break( stmt: nodes.Statement, break_stmt: nodes.Break ) -> bool: - """Return True if any Continue node can be found in descendants of `stmt` - before encountering `break_stmt`, ignoring any nested loops. + """Return True if any Continue node can be found in descendants of + `stmt` before encountering `break_stmt`, ignoring any nested loops. """ if stmt is break_stmt: return False @@ -887,9 +897,9 @@ def _recursive_search_for_continue_before_break( def _uncertain_nodes_in_try_blocks_when_evaluating_except_blocks( found_nodes: List[nodes.NodeNG], node_statement: nodes.Statement ) -> List[nodes.NodeNG]: - """Return any nodes in ``found_nodes`` that should be treated as uncertain - because they are in a try block and the ``node_statement`` being evaluated - is in one of its except handlers. + """Return any nodes in ``found_nodes`` that should be treated as + uncertain because they are in a try block and the ``node_statement`` + being evaluated is in one of its except handlers. """ uncertain_nodes: List[nodes.NodeNG] = [] closest_except_handler = utils.get_node_first_ancestor_of_type( @@ -1227,7 +1237,9 @@ def leave_setcomp(self, _: nodes.SetComp) -> None: self._to_consume.pop() def visit_functiondef(self, node: nodes.FunctionDef) -> None: - """Visit function: update consumption analysis variable and check locals.""" + """Visit function: update consumption analysis variable and check + locals. 
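The bookkeeping for "uncertain" nodes described above targets control flows like this hypothetical one, where the assignment inside ``try`` may never have executed by the time the ``except`` handler runs:

.. code-block:: python

    def read_config(path):
        try:
            handle = open(path)  # may raise before `handle` is ever bound
        except OSError:
            # The assignment in the try block is "uncertain" from the point of
            # view of this handler, so using `handle` here can be flagged as a
            # possible used-before-assignment.
            print("could not open", handle)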
+ """ self._to_consume.append(NamesConsumer(node, "function")) if not ( self.linter.is_message_enabled("redefined-outer-name") @@ -1462,7 +1474,8 @@ def _should_node_be_skipped( self, node: nodes.Name, consumer: NamesConsumer, is_start_index: bool ) -> bool: """Tests a consumer and node for various conditions in which the node - shouldn't be checked for the undefined-variable and used-before-assignment checks. + shouldn't be checked for the undefined-variable and used-before- + assignment checks. """ if consumer.scope_type == "class": # The list of base classes in the class definition is not part @@ -1773,9 +1786,8 @@ def visit_importfrom(self, node: nodes.ImportFrom) -> None: "unbalanced-tuple-unpacking", "unpacking-non-sequence", "self-cls-assignment" ) def visit_assign(self, node: nodes.Assign) -> None: - """Check unbalanced tuple unpacking for assignments - and unpacking non-sequences as well as in case self/cls - get assigned. + """Check unbalanced tuple unpacking for assignments and unpacking non- + sequences as well as in case self/cls get assigned. """ self._check_self_cls_assign(node) if not isinstance(node.targets[0], (nodes.Tuple, nodes.List)): @@ -1850,7 +1862,9 @@ def _defined_in_function_definition(node, frame): def _in_lambda_or_comprehension_body( node: nodes.NodeNG, frame: nodes.NodeNG ) -> bool: - """Return True if node within a lambda/comprehension body (or similar) and thus should not have access to class attributes in frame.""" + """Return True if node within a lambda/comprehension body (or similar) + and thus should not have access to class attributes in frame. + """ child = node parent = node.parent while parent is not None: @@ -2119,8 +2133,8 @@ def _is_only_type_assignment(node: nodes.Name, defstmt: nodes.Statement) -> bool def _is_first_level_self_reference( node: nodes.Name, defstmt: nodes.ClassDef, found_nodes: List[nodes.NodeNG] ) -> Tuple[VariableVisitConsumerAction, Optional[List[nodes.NodeNG]]]: - """Check if a first level method's annotation or default values - refers to its own class, and return a consumer action + """Check if a first level method's annotation or default values refers + to its own class, and return a consumer action. """ if node.frame(future=True).parent == defstmt and node.statement( future=True @@ -2418,7 +2432,8 @@ def _check_unused_arguments(self, name, node, stmt, argnames): self.add_message("unused-argument", args=name, node=stmt, confidence=confidence) def _check_late_binding_closure(self, node: nodes.Name) -> None: - """Check whether node is a cell var that is assigned within a containing loop. + """Check whether node is a cell var that is assigned within a + containing loop. Special cases where we don't care about the error: 1. When the node's function is immediately called, e.g. (lambda: i)() @@ -2478,9 +2493,8 @@ def _allowed_redefined_builtin(self, name): def _has_homonym_in_upper_function_scope( self, node: nodes.Name, index: int ) -> bool: - """Return whether there is a node with the same name in the - to_consume dict of an upper scope and if that scope is a - function + """Return whether there is a node with the same name in the to_consume + dict of an upper scope and if that scope is a function. 
:param node: node to check for :param index: index of the current consumer inside self._to_consume @@ -2555,9 +2569,7 @@ def _check_self_cls_assign(self, node: nodes.Assign) -> None: self.add_message("self-cls-assignment", node=node, args=(self_cls_name,)) def _check_unpacking(self, inferred, node, targets): - """Check for unbalanced tuple unpacking - and unpacking non sequences. - """ + """Check for unbalanced tuple unpacking and unpacking non sequences.""" if utils.is_inside_abstract_class(node): return if utils.is_comprehension(node): @@ -2609,8 +2621,8 @@ def _nodes_to_unpack(node: nodes.NodeNG) -> Optional[List[nodes.NodeNG]]: def _check_module_attrs(self, node, module, module_names): """Check that module_names (list of string) are accessible through the - given module - if the latest access name corresponds to a module, return it + given module if the latest access name corresponds to a module, return + it. """ while module_names: name = module_names.pop(0) diff --git a/pylint/config/config_initialization.py b/pylint/config/config_initialization.py index a006901a8e..896f4b2c20 100644 --- a/pylint/config/config_initialization.py +++ b/pylint/config/config_initialization.py @@ -19,8 +19,8 @@ def _config_initialization( config_file: Union[None, str, Path] = None, verbose_mode: Optional[bool] = None, ) -> List[str]: - """Parse all available options, read config files and command line arguments and set - options accordingly. + """Parse all available options, read config files and command line + arguments and set options accordingly. """ # Read the config file. The parser is stored on linter.cfgfile_parser diff --git a/pylint/config/configuration_mixin.py b/pylint/config/configuration_mixin.py index c5f29781d9..f3b686e25f 100644 --- a/pylint/config/configuration_mixin.py +++ b/pylint/config/configuration_mixin.py @@ -6,8 +6,8 @@ class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn): - """Basic mixin for simple configurations which don't need the manager / providers - model. + """Basic mixin for simple configurations which don't need the manager / + providers model. """ def __init__(self, *args, **kwargs): diff --git a/pylint/config/find_default_config_files.py b/pylint/config/find_default_config_files.py index 8d1fc1885d..f0c6ce24d5 100644 --- a/pylint/config/find_default_config_files.py +++ b/pylint/config/find_default_config_files.py @@ -71,8 +71,8 @@ def find_default_config_files() -> Iterator[str]: def find_pylintrc() -> Optional[str]: - """Search the pylint rc file and return its path if it finds it, else return - None. + """Search the pylint rc file and return its path if it finds it, else + return None. """ for config_file in find_default_config_files(): if config_file.endswith("pylintrc"): diff --git a/pylint/config/option_manager_mixin.py b/pylint/config/option_manager_mixin.py index 9260f75930..07e97ad718 100644 --- a/pylint/config/option_manager_mixin.py +++ b/pylint/config/option_manager_mixin.py @@ -57,7 +57,9 @@ def _patch_optparse(): class OptionsManagerMixIn: - """Handle configuration from both a configuration file and command line options.""" + """Handle configuration from both a configuration file and command line + options. + """ def __init__(self, usage, config_file=None): self.config_file = config_file @@ -143,7 +145,7 @@ def add_optik_option(self, provider, optikcontainer, opt, optdict): def optik_option(self, provider, opt, optdict): """Get our personal option definition and return a suitable form for - use with optik/optparse + use with optik/optparse. 
""" optdict = copy.copy(optdict) if "action" in optdict: @@ -193,7 +195,7 @@ def generate_config( self, stream: Optional[TextIO] = None, skipsections: Tuple[str, ...] = () ) -> None: """Write a configuration file according to the current configuration - into the given stream or stdout + into the given stream or stdout. """ options_by_section: Dict[str, List[Tuple]] = {} sections = [] @@ -332,7 +334,7 @@ def _parse_toml( def load_config_file(self): """Dispatch values previously read from a configuration file to each - option's provider + option's provider. """ parser = self.cfgfile_parser for section in parser.sections(): diff --git a/pylint/epylint.py b/pylint/epylint.py index bbe9e096ae..223f723a77 100755 --- a/pylint/epylint.py +++ b/pylint/epylint.py @@ -68,7 +68,9 @@ def _get_env(): - """Extracts the environment PYTHONPATH and appends the current 'sys.path' to it.""" + """Extracts the environment PYTHONPATH and appends the current 'sys.path' + to it. + """ env = dict(os.environ) env["PYTHONPATH"] = os.pathsep.join(sys.path) return env diff --git a/pylint/exceptions.py b/pylint/exceptions.py index 5999dcfd4a..fc04790d64 100644 --- a/pylint/exceptions.py +++ b/pylint/exceptions.py @@ -32,8 +32,12 @@ class InvalidReporterError(Exception): class InvalidArgsError(ValueError): - """Raised when passed arguments are invalid, e.g., have the wrong length.""" + """Raised when passed arguments are invalid, e.g., have the wrong + length. + """ class NoLineSuppliedError(Exception): - """Raised when trying to disable a message on a next line without supplying a line number.""" + """Raised when trying to disable a message on a next line without supplying + a line number. + """ diff --git a/pylint/extensions/_check_docs_utils.py b/pylint/extensions/_check_docs_utils.py index 4c3713c4ba..4065e51cee 100644 --- a/pylint/extensions/_check_docs_utils.py +++ b/pylint/extensions/_check_docs_utils.py @@ -782,7 +782,9 @@ class NumpyDocstring(GoogleDocstring): supports_yields = True def match_param_docs(self) -> Tuple[Set[str], Set[str]]: - """Matches parameter documentation section to parameter documentation rules.""" + """Matches parameter documentation section to parameter documentation + rules. + """ params_with_doc = set() params_with_type = set() diff --git a/pylint/extensions/broad_try_clause.py b/pylint/extensions/broad_try_clause.py index 118a027de9..c056e732d2 100644 --- a/pylint/extensions/broad_try_clause.py +++ b/pylint/extensions/broad_try_clause.py @@ -23,8 +23,9 @@ class BroadTryClauseChecker(checkers.BaseChecker): """Checks for try clauses with too many lines. - According to PEP 8, ``try`` clauses shall contain the absolute minimum amount of - code. This checker enforces a maximum number of statements within ``try`` clauses. + According to PEP 8, ``try`` clauses shall contain the absolute + minimum amount of code. This checker enforces a maximum number of + statements within ``try`` clauses. """ __implements__ = interfaces.IAstroidChecker diff --git a/pylint/extensions/code_style.py b/pylint/extensions/code_style.py index 9f9d801c93..3ecd6c661c 100644 --- a/pylint/extensions/code_style.py +++ b/pylint/extensions/code_style.py @@ -106,7 +106,9 @@ def visit_if(self, node: nodes.If) -> None: self._check_consider_using_assignment_expr(node) def _check_dict_consider_namedtuple_dataclass(self, node: nodes.Dict) -> None: - """Check if dictionary values can be replaced by Namedtuple or Dataclass.""" + """Check if dictionary values can be replaced by Namedtuple or + Dataclass. 
+ """ if not ( isinstance(node.parent, (nodes.Assign, nodes.AnnAssign)) and isinstance(node.parent.parent, nodes.Module) @@ -279,8 +281,8 @@ def _check_ignore_assignment_expr_suggestion( ) -> bool: """Return True if suggestion for assignment expr should be ignored. - E.g., in cases where a match statement would be a better fit (multiple - conditions). + E.g., in cases where a match statement would be a better fit + (multiple conditions). """ if isinstance(node.test, nodes.Compare): next_if_node: Optional[nodes.If] = None diff --git a/pylint/extensions/comparetozero.py b/pylint/extensions/comparetozero.py index 0a81039cbc..474f9c778a 100644 --- a/pylint/extensions/comparetozero.py +++ b/pylint/extensions/comparetozero.py @@ -32,9 +32,9 @@ def _is_constant_zero(node): class CompareToZeroChecker(checkers.BaseChecker): """Checks for comparisons to zero. - Most of the time you should use the fact that integers with a value of 0 are false. - An exception to this rule is when 0 is allowed in the program and has a different - meaning than None! + Most of the time you should use the fact that integers with a value + of 0 are false. An exception to this rule is when 0 is allowed in + the program and has a different meaning than None! """ __implements__ = (interfaces.IAstroidChecker,) diff --git a/pylint/extensions/confusing_elif.py b/pylint/extensions/confusing_elif.py index f490a54f19..87a9927c8a 100644 --- a/pylint/extensions/confusing_elif.py +++ b/pylint/extensions/confusing_elif.py @@ -20,8 +20,8 @@ class ConfusingConsecutiveElifChecker(BaseChecker): - """Checks if "elif" is used right after an indented block that finishes with "if" or - "elif" itself. + """Checks if "elif" is used right after an indented block that finishes + with "if" or "elif" itself. """ __implements__ = IAstroidChecker diff --git a/pylint/extensions/docparams.py b/pylint/extensions/docparams.py index ad891f45dd..3956018537 100644 --- a/pylint/extensions/docparams.py +++ b/pylint/extensions/docparams.py @@ -387,8 +387,8 @@ def _compare_missing_args( expected_argument_names, warning_node, ): - """Compare the found argument names with the expected ones and generate a - message if there are arguments missing. + """Compare the found argument names with the expected ones and generate + a message if there are arguments missing. :param found_argument_names: argument names found in the docstring :type found_argument_names: set @@ -423,8 +423,8 @@ def _compare_different_args( expected_argument_names, warning_node, ): - """Compare the found argument names with the expected ones and generate a - message if there are extra arguments found. + """Compare the found argument names with the expected ones and generate + a message if there are extra arguments found. :param found_argument_names: argument names found in the docstring :type found_argument_names: set @@ -461,8 +461,8 @@ def _compare_ignored_args( ignored_argument_names, warning_node, ): - """Compare the found argument names with the ignored ones and generate a message - if there are ignored arguments found. + """Compare the found argument names with the ignored ones and generate + a message if there are ignored arguments found. 
:param found_argument_names: argument names found in the docstring :type found_argument_names: set @@ -492,10 +492,10 @@ def check_arguments_in_docstring( warning_node: astroid.NodeNG, accept_no_param_doc: Optional[bool] = None, ): - """Check that all parameters in a function, method or class constructor on the - one hand and the parameters mentioned in the parameter documentation (e.g. the - Sphinx tags 'param' and 'type') on the other hand are consistent with each - other. + """Check that all parameters in a function, method or class constructor + on the one hand and the parameters mentioned in the parameter + documentation (e.g. the Sphinx tags 'param' and 'type') on the other + hand are consistent with each other. * Undocumented parameters except 'self' are noticed. * Undocumented parameter types except for 'self' and the ``*`` diff --git a/pylint/extensions/emptystring.py b/pylint/extensions/emptystring.py index 9a878bc05b..458034a3e9 100644 --- a/pylint/extensions/emptystring.py +++ b/pylint/extensions/emptystring.py @@ -27,9 +27,9 @@ class CompareToEmptyStringChecker(checkers.BaseChecker): """Checks for comparisons to empty string. - Most of the time you should use the fact that empty strings are false. An exception - to this rule is when an empty string value is allowed in the program and has a - different meaning than None! + Most of the time you should use the fact that empty strings are + false. An exception to this rule is when an empty string value is + allowed in the program and has a different meaning than None! """ __implements__ = (interfaces.IAstroidChecker,) diff --git a/pylint/extensions/for_any_all.py b/pylint/extensions/for_any_all.py index 690fc7bbf9..069d76dfec 100644 --- a/pylint/extensions/for_any_all.py +++ b/pylint/extensions/for_any_all.py @@ -47,9 +47,9 @@ def visit_for(self, node: nodes.For) -> None: @staticmethod def _build_suggested_string(node: nodes.For, final_return_bool: bool) -> str: - """When a nodes.For node can be rewritten as an any/all statement, return a - suggestion for that statement final_return_bool is the boolean literal returned - after the for loop if all conditions fail. + """When a nodes.For node can be rewritten as an any/all statement, + return a suggestion for that statement final_return_bool is the boolean + literal returned after the for loop if all conditions fail. """ loop_var = node.target.as_string() loop_iter = node.iter.as_string() diff --git a/pylint/extensions/mccabe.py b/pylint/extensions/mccabe.py index b84865decf..501ec243bc 100644 --- a/pylint/extensions/mccabe.py +++ b/pylint/extensions/mccabe.py @@ -180,8 +180,8 @@ class McCabeMethodChecker(checkers.BaseChecker): @check_messages("too-complex") def visit_module(self, node: nodes.Module) -> None: - """Visit an astroid.Module node to check too complex rating and - add message if is greater than max_complexity stored from options + """Visit an astroid.Module node to check too complex rating and add + message if is greater than max_complexity stored from options. 
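The rewrite suggested by the ``for_any_all`` extension above turns an early-returning loop into a single ``any``/``all`` call; an illustrative before/after (not part of the patch):

.. code-block:: python

    def has_negative(numbers):
        for number in numbers:
            if number < 0:
                return True
        return False

    # Suggested equivalent using a generator expression:
    def has_negative_rewritten(numbers):
        return any(number < 0 for number in numbers)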
""" visitor = PathGraphingAstVisitor() for child in node.body: diff --git a/pylint/extensions/overlapping_exceptions.py b/pylint/extensions/overlapping_exceptions.py index 11c79eb04e..4ab47494c7 100644 --- a/pylint/extensions/overlapping_exceptions.py +++ b/pylint/extensions/overlapping_exceptions.py @@ -17,9 +17,9 @@ class OverlappingExceptionsChecker(checkers.BaseChecker): - """Checks for two or more exceptions in the same exception handler - clause that are identical or parts of the same inheritance hierarchy - (i.e. overlapping). + """Checks for two or more exceptions in the same exception handler clause + that are identical or parts of the same inheritance hierarchy (i.e. + overlapping). """ __implements__ = interfaces.IAstroidChecker diff --git a/pylint/extensions/set_membership.py b/pylint/extensions/set_membership.py index 6ba5166c5b..7b9ad9647b 100644 --- a/pylint/extensions/set_membership.py +++ b/pylint/extensions/set_membership.py @@ -36,7 +36,9 @@ def visit_compare(self, node: nodes.Compare) -> None: self._check_in_comparison(comparator) def _check_in_comparison(self, comparator: nodes.NodeNG) -> None: - """Checks for membership comparisons with in-place container objects.""" + """Checks for membership comparisons with in-place container + objects. + """ if not isinstance(comparator, nodes.BaseContainer) or isinstance( comparator, nodes.Set ): diff --git a/pylint/extensions/typing.py b/pylint/extensions/typing.py index fb5aa2b560..bf1641a721 100644 --- a/pylint/extensions/typing.py +++ b/pylint/extensions/typing.py @@ -257,8 +257,8 @@ def _check_for_typing_alias( @check_messages("consider-using-alias") def leave_module(self, node: nodes.Module) -> None: - """After parsing of module is complete, add messages for 'consider- using-alias' - check. + """After parsing of module is complete, add messages for 'consider- + using-alias' check. Make sure results are safe to recommend / collision free. """ diff --git a/pylint/lint/expand_modules.py b/pylint/lint/expand_modules.py index 296e6b5778..b400768882 100644 --- a/pylint/lint/expand_modules.py +++ b/pylint/lint/expand_modules.py @@ -17,8 +17,10 @@ def _is_package_cb(inner_path, parts): def get_python_path(filepath: str) -> str: - """TODO This get the python path with the (bad) assumption that there is always - an __init__.py. This is not true since python 3.3 and is causing problem. + """TODO This get the python path with the (bad) assumption that there is + always an __init__.py. + + This is not true since python 3.3 and is causing problem. """ dirname = os.path.realpath(os.path.expanduser(filepath)) if not os.path.isdir(dirname): @@ -44,7 +46,7 @@ def expand_modules( ignore_list_paths_re: List[Pattern[str]], ) -> Tuple[List[ModuleDescriptionDict], List[ErrorDescriptionDict]]: """Take a list of files/modules/packages and return the list of tuple - (file, module name) which have to be actually checked + (file, module name) which have to be actually checked. """ result: List[ModuleDescriptionDict] = [] errors: List[ErrorDescriptionDict] = [] diff --git a/pylint/lint/parallel.py b/pylint/lint/parallel.py index 49d356e6fb..c53a5bddee 100644 --- a/pylint/lint/parallel.py +++ b/pylint/lint/parallel.py @@ -51,8 +51,8 @@ def _get_new_args(message): def _worker_initialize( linter: bytes, arguments: Union[None, str, Sequence[str]] = None ) -> None: - """Function called to initialize a worker for a Process within a multiprocessing - Pool. + """Function called to initialize a worker for a Process within a + multiprocessing Pool. 
:param linter: A linter-class (PyLinter) instance pickled with dill :param arguments: File or module name(s) to lint and to be added to sys.path @@ -108,7 +108,9 @@ def _worker_check_single_file( def _merge_mapreduce_data(linter, all_mapreduce_data): - """Merges map/reduce data across workers, invoking relevant APIs on checkers.""" + """Merges map/reduce data across workers, invoking relevant APIs on + checkers. + """ # First collate the data and prepare it, so we can send it to the checkers for # validation. The intent here is to collect all the mapreduce data for all checker- # runs across processes - that will then be passed to a static method on the @@ -134,11 +136,12 @@ def check_parallel( files: Iterable[FileItem], arguments: Union[None, str, Sequence[str]] = None, ) -> None: - """Use the given linter to lint the files with given amount of workers (jobs) This - splits the work filestream-by-filestream. + """Use the given linter to lint the files with given amount of workers + (jobs) This splits the work filestream-by-filestream. - If you need to do work across multiple files, as in the similarity- checker, then - inherit from MapReduceMixin and implement the map/reduce mixin functionality. + If you need to do work across multiple files, as in the similarity- + checker, then inherit from MapReduceMixin and implement the + map/reduce mixin functionality. """ # The linter is inherited by all the pool's workers, i.e. the linter # is identical to the linter object here. This is required so that diff --git a/pylint/lint/pylinter.py b/pylint/lint/pylinter.py index 2f5389d613..0e4e25d137 100644 --- a/pylint/lint/pylinter.py +++ b/pylint/lint/pylinter.py @@ -551,6 +551,7 @@ def __init__( pylintrc: Optional[str] = None, ) -> None: """Some stuff has to be done before ancestors initialization... + messages store / checkers / reporter / astroid manager """ # Attributes for reporters @@ -631,8 +632,8 @@ def load_default_plugins(self): reporters.initialize(self) def load_plugin_modules(self, modnames): - """Take a list of module names which are pylint plugins and load - and register them + """Take a list of module names which are pylint plugins and load and + register them. """ for modname in modnames: if modname in self._dynamic_plugins: @@ -647,9 +648,9 @@ def load_plugin_modules(self, modnames): def load_plugin_configuration(self): """Call the configuration hook for plugins. - This walks through the list of plugins, grabs the "load_configuration" - hook, if exposed, and calls it to allow plugins to configure specific - settings. + This walks through the list of plugins, grabs the + "load_configuration" hook, if exposed, and calls it to allow + plugins to configure specific settings. """ for modname in self._dynamic_plugins: try: @@ -711,8 +712,8 @@ def set_reporter( reporter.linter = self def set_option(self, optname, value, action=None, optdict=None): - """Overridden from config.OptionsProviderMixin to handle some - special options + """Overridden from config.OptionsProviderMixin to handle some special + options. """ if optname in self._options_methods or optname in self._bw_options_methods: if value: @@ -776,8 +777,9 @@ def register_checker(self, checker: checkers.BaseChecker) -> None: def enable_fail_on_messages(self): """Enable 'fail on' msgs. - Convert values in config.fail_on (which might be msg category, msg id, - or symbol) to specific msgs, then enable and flag them for later. 
+ Convert values in config.fail_on (which might be msg category, + msg id, or symbol) to specific msgs, then enable and flag them + for later. """ fail_on_vals = self.config.fail_on if not fail_on_vals: @@ -858,8 +860,8 @@ def list_messages_enabled(self): # see func_block_disable_msg.py test case for expected behaviour def process_tokens(self, tokens): - """Process tokens from the current module to search for module/block level - options. + """Process tokens from the current module to search for module/block + level options. """ control_pragmas = {"disable", "disable-next", "enable"} prev_line = None @@ -1040,9 +1042,11 @@ def _discover_files(files_or_modules: Sequence[str]) -> Iterator[str]: yield something def check(self, files_or_modules: Union[Sequence[str], str]) -> None: - """Main checking entry: check a list of files or modules from their name. + """Main checking entry: check a list of files or modules from their + name. - files_or_modules is either a string or list of strings presenting modules to check. + files_or_modules is either a string or list of strings + presenting modules to check. """ self.initialize() if not isinstance(files_or_modules, (list, tuple)): @@ -1121,7 +1125,8 @@ def _check_files( self.add_message(symbol, args=msg) def _check_file(self, get_ast, check_astroid_module, file: FileItem): - """Check a file using the passed utility functions (get_ast and check_astroid_module). + """Check a file using the passed utility functions (get_ast and + check_astroid_module). :param callable get_ast: callable returning AST from defined file taking the following arguments - filepath: path to the file to check @@ -1152,10 +1157,11 @@ def _check_file(self, get_ast, check_astroid_module, file: FileItem): @staticmethod def _get_file_descr_from_stdin(filepath: str) -> FileItem: - """Return file description (tuple of module name, file path, base name) from given file path. + """Return file description (tuple of module name, file path, base name) + from given file path. - This method is used for creating suitable file description for _check_files when the - source is standard input. + This method is used for creating suitable file description for + _check_files when the source is standard input. """ try: # Note that this function does not really perform an @@ -1168,9 +1174,11 @@ def _get_file_descr_from_stdin(filepath: str) -> FileItem: return FileItem(modname, filepath, filepath) def _iterate_file_descrs(self, files_or_modules) -> Iterator[FileItem]: - """Return generator yielding file descriptions (tuples of module name, file path, base name). + """Return generator yielding file descriptions (tuples of module name, + file path, base name). - The returned generator yield one item for each Python module that should be linted. + The returned generator yield one item for each Python module + that should be linted. """ for descr in self._expand_files(files_or_modules): name, filepath, is_arg = descr["name"], descr["path"], descr["isarg"] @@ -1195,8 +1203,8 @@ def _expand_files(self, modules) -> List[ModuleDescriptionDict]: return result def set_current_module(self, modname, filepath: Optional[str] = None): - """Set the name of the currently analyzed module and - init statistics for it + """Set the name of the currently analyzed module and init statistics + for it. """ if not modname and filepath is None: return @@ -1218,7 +1226,8 @@ def set_current_module(self, modname, filepath: Optional[str] = None): def _astroid_module_checker(self): """Context manager for checking ASTs. 
- The value in the context is callable accepting AST as its only argument. + The value in the context is callable accepting AST as its only + argument. """ walker = ASTWalker(self) _checkers = self.prepare_checkers() @@ -1447,8 +1456,8 @@ def _get_message_state_scope( def _is_one_message_enabled(self, msgid: str, line: Optional[int]) -> bool: """Checks state of a single message for the current file. - This function can't be cached as it depends on self.file_state which can - change. + This function can't be cached as it depends on self.file_state + which can change. """ if line is None: return self._msgs_state.get(msgid, True) @@ -1486,7 +1495,8 @@ def is_message_enabled( line: Optional[int] = None, confidence: Optional[interfaces.Confidence] = None, ) -> bool: - """Return whether this message is enabled for the current file, line and confidence level. + """Return whether this message is enabled for the current file, line + and confidence level. This function can't be cached right now as the line is the line of the currently analysed file (self.file_state), if it changes, then the @@ -1519,8 +1529,8 @@ def _add_one_message( end_lineno: Optional[int], end_col_offset: Optional[int], ) -> None: - """After various checks have passed a single Message is - passed to the reporter and added to stats + """After various checks have passed a single Message is passed to the + reporter and added to stats. """ message_definition.check_message_definition(line, node) @@ -1637,10 +1647,11 @@ def add_ignored_message( ) -> None: """Prepares a message to be added to the ignored message storage. - Some checks return early in special cases and never reach add_message(), - even though they would normally issue a message. - This creates false positives for useless-suppression. - This function avoids this by adding those message to the ignored msgs attribute + Some checks return early in special cases and never reach + add_message(), even though they would normally issue a message. + This creates false positives for useless-suppression. This + function avoids this by adding those message to the ignored msgs + attribute """ message_definitions = self.msgs_store.get_message_definitions(msgid) for message_definition in message_definitions: @@ -1658,8 +1669,7 @@ def add_ignored_message( def _message_symbol(self, msgid: str) -> List[str]: """Get the message symbol of the given message id. - Return the original message id if the message does not - exist. + Return the original message id if the message does not exist. """ try: return [md.symbol for md in self.msgs_store.get_message_definitions(msgid)] @@ -1685,7 +1695,9 @@ def _set_one_msg_status( def _get_messages_to_set( self, msgid: str, enable: bool, ignore_unknown: bool = False ) -> List[MessageDefinition]: - """Do some tests and find the actual messages of which the status should be set.""" + """Do some tests and find the actual messages of which the status + should be set. + """ message_definitions = [] if msgid == "all": for _msgid in MSG_TYPES: @@ -1740,7 +1752,9 @@ def _set_msg_status( line: Optional[int] = None, ignore_unknown: bool = False, ) -> None: - """Do some tests and then iterate over message definitions to set state.""" + """Do some tests and then iterate over message definitions to set + state. 
+ """ assert scope in {"package", "module"} message_definitions = self._get_messages_to_set(msgid, enable, ignore_unknown) diff --git a/pylint/lint/run.py b/pylint/lint/run.py index 6cbdc5cc71..06a42b8bfa 100644 --- a/pylint/lint/run.py +++ b/pylint/lint/run.py @@ -20,7 +20,9 @@ def _cpu_count() -> int: - """Use sched_affinity if available for virtualized or containerized environments.""" + """Use sched_affinity if available for virtualized or containerized + environments. + """ sched_getaffinity = getattr(os, "sched_getaffinity", None) # pylint: disable=not-callable,using-constant-test,useless-suppression if sched_getaffinity: @@ -398,7 +400,8 @@ def cb_add_plugins(self, name, value): self._plugins.extend(utils._splitstrip(value)) def cb_error_mode(self, *args, **kwargs): - """Error mode: + """Error mode:. + * disable all but error messages * disable the 'miscellaneous' checker which can be safely deactivated in debug @@ -440,8 +443,8 @@ def cb_list_messages_enabled(self, option, optname, value, parser): def cb_list_groups(self, *args, **kwargs): """List all the check groups that pylint knows about. - These should be useful to know what check groups someone can disable - or enable. + These should be useful to know what check groups someone can + disable or enable. """ for check in self.linter.get_checker_names(): print(check) diff --git a/pylint/lint/utils.py b/pylint/lint/utils.py index 410269e9d9..cd26844725 100644 --- a/pylint/lint/utils.py +++ b/pylint/lint/utils.py @@ -70,10 +70,10 @@ def get_fatal_error_message(filepath: str, issue_template_path: Path) -> str: def preprocess_options(args, search_for): """Look for some options (keys of ) which have to be processed - before others + before others. - values of are callback functions to call when the option is - found + values of are callback functions to call when the + option is found """ i = 0 while i < len(args): @@ -122,8 +122,8 @@ def fix_import_path(args): """Prepare 'sys.path' for running the linter checks. Within this context, each of the given arguments is importable. - Paths are added to 'sys.path' in corresponding order to the arguments. - We avoid adding duplicate directories to sys.path. + Paths are added to 'sys.path' in corresponding order to the + arguments. We avoid adding duplicate directories to sys.path. `sys.path` is reset to its original value upon exiting this context. """ original = _patch_sys_path(args) diff --git a/pylint/message/message_definition.py b/pylint/message/message_definition.py index a286f5a6d5..cf5721850d 100644 --- a/pylint/message/message_definition.py +++ b/pylint/message/message_definition.py @@ -58,7 +58,9 @@ def __str__(self) -> str: return f"{repr(self)}:\n{self.msg} {self.description}" def may_be_emitted(self) -> bool: - """Return True if message may be emitted using the current interpreter.""" + """Return True if message may be emitted using the current + interpreter. + """ if self.minversion is not None and self.minversion > sys.version_info: return False if self.maxversion is not None and self.maxversion <= sys.version_info: diff --git a/pylint/message/message_definition_store.py b/pylint/message/message_definition_store.py index ec41cfc0cd..5ef60b9d9b 100644 --- a/pylint/message/message_definition_store.py +++ b/pylint/message/message_definition_store.py @@ -15,8 +15,8 @@ class MessageDefinitionStore: - """The messages store knows information about every possible message definition but - has no particular state during analysis. 
+ """The messages store knows information about every possible message + definition but has no particular state during analysis. """ def __init__(self) -> None: @@ -55,9 +55,9 @@ def register_message(self, message: MessageDefinition) -> None: def get_message_definitions(self, msgid_or_symbol: str) -> List[MessageDefinition]: """Returns the Message definition for either a numeric or symbolic id. - The cache has no limit as its size will likely stay minimal. For each message we - store about 1000 characters, so even if we would have 1000 messages the cache - would only take up ~= 1 Mb. + The cache has no limit as its size will likely stay minimal. For + each message we store about 1000 characters, so even if we would + have 1000 messages the cache would only take up ~= 1 Mb. """ return [ self._messages_definitions[m] diff --git a/pylint/message/message_id_store.py b/pylint/message/message_id_store.py index e6c025f449..cf267f9465 100644 --- a/pylint/message/message_id_store.py +++ b/pylint/message/message_id_store.py @@ -7,8 +7,8 @@ class MessageIdStore: - """The MessageIdStore store MessageId and make sure that there is a 1-1 relation - between msgid and symbol. + """The MessageIdStore store MessageId and make sure that there is a 1-1 + relation between msgid and symbol. """ def __init__(self) -> None: @@ -53,8 +53,9 @@ def register_message_definition( def add_msgid_and_symbol(self, msgid: str, symbol: str) -> None: """Add valid message id. - There is a little duplication with add_legacy_msgid_and_symbol to avoid a - function call, this is called a lot at initialization. + There is a little duplication with add_legacy_msgid_and_symbol + to avoid a function call, this is called a lot at + initialization. """ self.__msgid_to_symbol[msgid] = symbol self.__symbol_to_msgid[symbol] = msgid @@ -64,8 +65,8 @@ def add_legacy_msgid_and_symbol( ) -> None: """Add valid legacy message id. - There is a little duplication with add_msgid_and_symbol to avoid a function - call, this is called a lot at initialization. + There is a little duplication with add_msgid_and_symbol to avoid + a function call, this is called a lot at initialization. """ self.__msgid_to_symbol[msgid] = symbol self.__symbol_to_msgid[symbol] = msgid @@ -109,7 +110,8 @@ def _raise_duplicate_msgid(symbol: str, msgid: str, other_msgid: str) -> NoRetur def get_active_msgids(self, msgid_or_symbol: str) -> List[str]: """Return msgids but the input can be a symbol. - self.__active_msgids is used to implement a primitive cache for this function. + self.__active_msgids is used to implement a primitive cache for + this function. """ try: return self.__active_msgids[msgid_or_symbol] diff --git a/pylint/pyreverse/diadefslib.py b/pylint/pyreverse/diadefslib.py index 6fa8ea107b..6b4e0ce045 100644 --- a/pylint/pyreverse/diadefslib.py +++ b/pylint/pyreverse/diadefslib.py @@ -184,12 +184,12 @@ def visit_importfrom(self, node: nodes.ImportFrom) -> None: class ClassDiadefGenerator(DiaDefGenerator): """Generate a class diagram definition including all classes related to a - given class + given class. """ def class_diagram(self, project, klass): """Return a class diagram definition for the given klass and its - related klasses + related klasses. 
""" self.classdiagram = ClassDiagram(klass, self.config.mode) diff --git a/pylint/pyreverse/diagrams.py b/pylint/pyreverse/diagrams.py index fa88e38165..5f813c54a3 100644 --- a/pylint/pyreverse/diagrams.py +++ b/pylint/pyreverse/diagrams.py @@ -232,7 +232,7 @@ def module(self, name): def get_module(self, name, node): """Return a module by its name, looking also for relative imports; - raise KeyError if not found + raise KeyError if not found. """ for mod in self.modules(): mod_name = mod.node.name diff --git a/pylint/pyreverse/dot_printer.py b/pylint/pyreverse/dot_printer.py index fccdcf5579..0b4fc4ec46 100644 --- a/pylint/pyreverse/dot_printer.py +++ b/pylint/pyreverse/dot_printer.py @@ -8,7 +8,9 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Class to generate files in dot format and image formats supported by Graphviz.""" +"""Class to generate files in dot format and image formats supported by +Graphviz. +""" import os import subprocess import sys diff --git a/pylint/pyreverse/inspector.py b/pylint/pyreverse/inspector.py index 61c8d1f7ea..808f34266c 100644 --- a/pylint/pyreverse/inspector.py +++ b/pylint/pyreverse/inspector.py @@ -15,6 +15,7 @@ # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE """Visitor doing some postprocessing on the astroid tree. + Try to resolve definitions (namespace) dictionary, relationship... """ import collections diff --git a/pylint/pyreverse/main.py b/pylint/pyreverse/main.py index e955388cb0..997931a057 100644 --- a/pylint/pyreverse/main.py +++ b/pylint/pyreverse/main.py @@ -22,7 +22,7 @@ """%prog [options] . - create UML diagrams for classes and modules in +create UML diagrams for classes and modules in """ import sys from typing import Iterable diff --git a/pylint/pyreverse/plantuml_printer.py b/pylint/pyreverse/plantuml_printer.py index 2e643fe1fe..9a331d1d7e 100644 --- a/pylint/pyreverse/plantuml_printer.py +++ b/pylint/pyreverse/plantuml_printer.py @@ -3,7 +3,9 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Class to generate files in dot format and image formats supported by Graphviz.""" +"""Class to generate files in dot format and image formats supported by +Graphviz. +""" from typing import Dict, Optional from pylint.pyreverse.printer import EdgeType, Layout, NodeProperties, NodeType, Printer diff --git a/pylint/pyreverse/printer.py b/pylint/pyreverse/printer.py index 559e456be3..6d25191771 100644 --- a/pylint/pyreverse/printer.py +++ b/pylint/pyreverse/printer.py @@ -71,7 +71,9 @@ def _dec_indent(self) -> None: @abstractmethod def _open_graph(self) -> None: - """Emit the header lines, i.e. all boilerplate code that defines things like layout etc.""" + """Emit the header lines, i.e. all boilerplate code that defines things + like layout etc. + """ def emit(self, line: str, force_newline: Optional[bool] = True) -> None: if force_newline and not line.endswith("\n"): @@ -85,7 +87,10 @@ def emit_node( type_: NodeType, properties: Optional[NodeProperties] = None, ) -> None: - """Create a new node. Nodes can be classes, packages, participants etc.""" + """Create a new node. + + Nodes can be classes, packages, participants etc. 
+ """ @abstractmethod def emit_edge( diff --git a/pylint/pyreverse/utils.py b/pylint/pyreverse/utils.py index 5cb4138e74..72688d5b59 100644 --- a/pylint/pyreverse/utils.py +++ b/pylint/pyreverse/utils.py @@ -61,7 +61,9 @@ def insert_default_options(): def get_visibility(name): - """Return the visibility from a name: public, protected, private or special.""" + """Return the visibility from a name: public, protected, private or + special. + """ if SPECIAL.match(name): visibility = "special" elif PRIVATE.match(name): @@ -80,14 +82,14 @@ def get_visibility(name): def is_abstract(node): """Return true if the given class node correspond to an abstract class - definition + definition. """ return ABSTRACT.match(node.name) def is_final(node): """Return true if the given class/function node correspond to final - definition + definition. """ return FINAL.match(node.name) @@ -269,8 +271,8 @@ def get_annotation( def infer_node(node: Union[nodes.AssignAttr, nodes.AssignName]) -> set: - """Return a set containing the node annotation if it exists - otherwise return a set of the inferred types using the NodeNG.infer method + """Return a set containing the node annotation if it exists otherwise + return a set of the inferred types using the NodeNG.infer method. """ ann = get_annotation(node) @@ -286,8 +288,9 @@ def infer_node(node: Union[nodes.AssignAttr, nodes.AssignName]) -> set: def check_graphviz_availability(): """Check if the ``dot`` command is available on the machine. - This is needed if image output is desired and ``dot`` is used to convert - from *.dot or *.gv into the final output format. + + This is needed if image output is desired and ``dot`` is used to + convert from *.dot or *.gv into the final output format. """ if shutil.which("dot") is None: print( diff --git a/pylint/pyreverse/vcg_printer.py b/pylint/pyreverse/vcg_printer.py index f7e2a46652..f79f42267a 100644 --- a/pylint/pyreverse/vcg_printer.py +++ b/pylint/pyreverse/vcg_printer.py @@ -13,10 +13,11 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Functions to generate files readable with Georg Sander's vcg -(Visualization of Compiler Graphs). -You can download vcg at https://rw4.cs.uni-sb.de/~sander/html/gshome.html -Note that vcg exists as a debian package. +"""Functions to generate files readable with Georg Sander's vcg (Visualization +of Compiler Graphs). + +You can download vcg at https://rw4.cs.uni- +sb.de/~sander/html/gshome.html Note that vcg exists as a debian package. See vcg's documentation for explanation about the different values that maybe used for the functions parameters. """ @@ -212,7 +213,10 @@ def emit_node( type_: NodeType, properties: Optional[NodeProperties] = None, ) -> None: - """Create a new node. Nodes can be classes, packages, participants etc.""" + """Create a new node. + + Nodes can be classes, packages, participants etc. + """ if properties is None: properties = NodeProperties(label=name) elif properties.label is None: diff --git a/pylint/reporters/base_reporter.py b/pylint/reporters/base_reporter.py index 570175567b..b758e47f07 100644 --- a/pylint/reporters/base_reporter.py +++ b/pylint/reporters/base_reporter.py @@ -69,12 +69,11 @@ def _display(self, layout: "Section") -> None: def display_messages(self, layout: Optional["Section"]) -> None: """Hook for displaying the messages of the reporter. - This will be called whenever the underlying messages - needs to be displayed. 
For some reporters, it probably - doesn't make sense to display messages as soon as they - are available, so some mechanism of storing them could be used. - This method can be implemented to display them after they've - been aggregated. + This will be called whenever the underlying messages needs to be + displayed. For some reporters, it probably doesn't make sense to + display messages as soon as they are available, so some + mechanism of storing them could be used. This method can be + implemented to display them after they've been aggregated. """ # Event callbacks diff --git a/pylint/reporters/multi_reporter.py b/pylint/reporters/multi_reporter.py index a68c8c423d..c2c7382d29 100644 --- a/pylint/reporters/multi_reporter.py +++ b/pylint/reporters/multi_reporter.py @@ -49,9 +49,10 @@ def out(self): @out.setter def out(self, output: Optional[AnyFile] = None): - """MultiReporter doesn't have its own output. This method is only - provided for API parity with BaseReporter and should not be called - with non-None values for 'output'. + """MultiReporter doesn't have its own output. + + This method is only provided for API parity with BaseReporter + and should not be called with non-None values for 'output'. """ self.__out = None if output is not None: diff --git a/pylint/reporters/reports_handler_mix_in.py b/pylint/reporters/reports_handler_mix_in.py index 8f65397b5b..20119a9237 100644 --- a/pylint/reporters/reports_handler_mix_in.py +++ b/pylint/reporters/reports_handler_mix_in.py @@ -25,8 +25,8 @@ class ReportsHandlerMixIn: - """A mix-in class containing all the reports and stats manipulation related methods - for the main lint class. + """A mix-in class containing all the reports and stats manipulation related + methods for the main lint class. """ def __init__(self) -> None: diff --git a/pylint/reporters/text.py b/pylint/reporters/text.py index 6743ab5b15..0d18c14405 100644 --- a/pylint/reporters/text.py +++ b/pylint/reporters/text.py @@ -188,7 +188,9 @@ def __init__(self, output: Optional[TextIO] = None) -> None: """The output format template with any unrecognized arguments removed.""" def on_set_current_module(self, module: str, filepath: Optional[str]) -> None: - """Set the format template to be used and check for unrecognized arguments.""" + """Set the format template to be used and check for unrecognized + arguments. + """ template = str(self.linter.config.msg_template or self._template) # Return early if the template is the same as the previous one @@ -210,7 +212,9 @@ def on_set_current_module(self, module: str, filepath: Optional[str]) -> None: self._fixed_template = template def write_message(self, msg: Message) -> None: - """Convenience method to write a formatted message with class default template.""" + """Convenience method to write a formatted message with class default + template. + """ self_dict = msg._asdict() for key in ("end_line", "end_column"): self_dict[key] = self_dict[key] or "" @@ -328,8 +332,8 @@ def _get_decoration(self, msg_id: str) -> MessageStyle: return self.color_mapping.get(msg_id[0]) or MessageStyle(None) def handle_message(self, msg: Message) -> None: - """Manage message of different types, and colorize output - using ansi escape codes + """Manage message of different types, and colorize output using ansi + escape codes. 
""" if msg.module not in self._modules: msg_style = self._get_decoration("S") diff --git a/pylint/reporters/ureports/base_writer.py b/pylint/reporters/ureports/base_writer.py index dba57c86fe..f59651709b 100644 --- a/pylint/reporters/ureports/base_writer.py +++ b/pylint/reporters/ureports/base_writer.py @@ -13,8 +13,8 @@ """Universal report objects and some formatting drivers. -A way to create simple reports using python objects, primarily designed to be -formatted as text and html. +A way to create simple reports using python objects, primarily designed +to be formatted as text and html. """ import sys from io import StringIO @@ -35,8 +35,8 @@ class BaseWriter: def format(self, layout, stream: TextIO = sys.stdout, encoding=None) -> None: """Format and write the given layout into the stream object. - unicode policy: unicode strings may be found in the layout; - try to call 'stream.write' with it, but give it back encoded using + unicode policy: unicode strings may be found in the layout; try + to call 'stream.write' with it, but give it back encoded using the given encoding if it fails """ if not encoding: @@ -50,8 +50,8 @@ def format(self, layout, stream: TextIO = sys.stdout, encoding=None) -> None: def format_children( self, layout: Union["EvaluationSection", "Paragraph", "Section"] ) -> None: - """Recurse on the layout children and call their accept method - (see the Visitor pattern) + """Recurse on the layout children and call their accept method (see the + Visitor pattern) """ for child in getattr(layout, "children", ()): child.accept(self) @@ -74,7 +74,8 @@ def end_format(self) -> None: def get_table_content(self, table: "Table") -> List[List[str]]: """Trick to get table content without actually writing it. - return an aligned list of lists containing table cells values as string + return an aligned list of lists containing table cells values as + string """ result: List[List[str]] = [[]] cols = table.cols @@ -90,7 +91,7 @@ def get_table_content(self, table: "Table") -> List[List[str]]: def compute_content(self, layout) -> Iterator[str]: """Trick to compute the formatting of children layout before actually - writing it + writing it. return an iterator on strings (one for each child element) """ diff --git a/pylint/reporters/ureports/text_writer.py b/pylint/reporters/ureports/text_writer.py index cb80e67713..5c1626270b 100644 --- a/pylint/reporters/ureports/text_writer.py +++ b/pylint/reporters/ureports/text_writer.py @@ -32,8 +32,8 @@ class TextWriter(BaseWriter): - """Format layouts as text - (ReStructured inspiration but not totally handled yet) + """Format layouts as text (ReStructured inspiration but not totally handled + yet) """ def __init__(self): diff --git a/pylint/testutils/configuration_test.py b/pylint/testutils/configuration_test.py index b699f32428..bccda08464 100644 --- a/pylint/testutils/configuration_test.py +++ b/pylint/testutils/configuration_test.py @@ -32,7 +32,9 @@ def get_expected_or_default( suffix: str, default: str, ) -> str: - """Return the expected value from the file if it exists, or the given default.""" + """Return the expected value from the file if it exists, or the given + default. 
+ """ expected = default path = Path(tested_configuration_file) expected_result_path = path.parent / f"{path.stem}.{suffix}" @@ -55,7 +57,9 @@ def get_expected_or_default( def get_expected_configuration( configuration_path: str, default_configuration: PylintConfiguration ) -> PylintConfiguration: - """Get the expected parsed configuration of a configuration functional test.""" + """Get the expected parsed configuration of a configuration functional + test. + """ result = copy.deepcopy(default_configuration) config_as_json = get_expected_or_default( configuration_path, suffix="result.json", default="{}" @@ -141,7 +145,9 @@ def get_expected_output( def run_using_a_configuration_file( configuration_path: Union[Path, str], file_to_lint: str = __file__ ) -> Tuple[Mock, Mock, Run]: - """Simulate a run with a configuration without really launching the checks.""" + """Simulate a run with a configuration without really launching the + checks. + """ configuration_path = str(configuration_path) args = ["--rcfile", configuration_path, file_to_lint] # We do not capture the `SystemExit` as then the `runner` variable diff --git a/pylint/testutils/decorator.py b/pylint/testutils/decorator.py index 21ddaaa8ef..a6abd551b5 100644 --- a/pylint/testutils/decorator.py +++ b/pylint/testutils/decorator.py @@ -12,8 +12,8 @@ def set_config(**kwargs): """Decorator for setting config values on a checker. - Passing the args and kwargs back to the test function itself allows this decorator - to be used on parametrized test cases. + Passing the args and kwargs back to the test function itself allows + this decorator to be used on parametrized test cases. """ def _wrapper(fun): diff --git a/pylint/testutils/functional/lint_module_output_update.py b/pylint/testutils/functional/lint_module_output_update.py index 0f76752ea8..03dc919b44 100644 --- a/pylint/testutils/functional/lint_module_output_update.py +++ b/pylint/testutils/functional/lint_module_output_update.py @@ -43,7 +43,9 @@ def _check_output_text( expected_output: List[OutputLine], actual_output: List[OutputLine], ) -> None: - """Overwrite or remove the expected output file based on actual output.""" + """Overwrite or remove the expected output file based on actual + output. + """ # Remove the file if no output is actually expected and a file exists if not expected_output and not actual_output: if os.path.exists(self._test_file.expected_output): diff --git a/pylint/testutils/lint_module_test.py b/pylint/testutils/lint_module_test.py index e121dc4813..7217a63fdd 100644 --- a/pylint/testutils/lint_module_test.py +++ b/pylint/testutils/lint_module_test.py @@ -142,7 +142,8 @@ def multiset_difference( ) -> Tuple[MessageCounter, Dict[Tuple[int, str], int]]: """Takes two multisets and compares them. - A multiset is a dict with the cardinality of the key as the value. + A multiset is a dict with the cardinality of the key as the + value. """ missing = expected_entries.copy() missing.subtract(actual_entries) diff --git a/pylint/testutils/output_line.py b/pylint/testutils/output_line.py index 0ca79dc6a0..abbb04d5e1 100644 --- a/pylint/testutils/output_line.py +++ b/pylint/testutils/output_line.py @@ -104,8 +104,8 @@ def _get_column(column: str) -> int: @staticmethod def _get_py38_none_value(value: T, check_endline: bool) -> Optional[T]: - """Used to make end_line and end_column None as indicated by our version - compared to `min_pyver_end_position`. + """Used to make end_line and end_column None as indicated by our + version compared to `min_pyver_end_position`. 
""" if not check_endline: return None # pragma: no cover @@ -115,8 +115,8 @@ def _get_py38_none_value(value: T, check_endline: bool) -> Optional[T]: def from_csv( cls, row: Union[Sequence[str], str], check_endline: bool = True ) -> "OutputLine": - """Create an OutputLine from a comma separated list (the functional tests - expected output .txt files). + """Create an OutputLine from a comma separated list (the functional + tests expected output .txt files). """ if isinstance(row, str): row = row.split(",") @@ -167,7 +167,9 @@ def from_csv( raise MalformedOutputLineException(row, e) from e def to_csv(self) -> Tuple[str, str, str, str, str, str, str, str]: - """Convert an OutputLine to a tuple of string to be written by a csv- writer.""" + """Convert an OutputLine to a tuple of string to be written by a csv- + writer. + """ return ( str(self.symbol), str(self.lineno), @@ -181,7 +183,9 @@ def to_csv(self) -> Tuple[str, str, str, str, str, str, str, str]: @staticmethod def _value_to_optional_int(value: Optional[str]) -> Optional[int]: - """Checks if a (stringified) value should be None or a Python integer.""" + """Checks if a (stringified) value should be None or a Python + integer. + """ if value == "None" or not value: return None return int(value) diff --git a/pylint/testutils/primer.py b/pylint/testutils/primer.py index 8113c44bee..27943b59aa 100644 --- a/pylint/testutils/primer.py +++ b/pylint/testutils/primer.py @@ -72,10 +72,11 @@ def pylint_args(self) -> List[str]: def lazy_clone(self) -> None: # pragma: no cover """Concatenates the target directory and clones the file. - Not expected to be tested as the primer won't work if it doesn't. It's tested in - the continuous integration primers, only the coverage is not calculated on - everything. If lazy clone breaks for local use we'll probably notice because - we'll have a fatal when launching the primer locally. + Not expected to be tested as the primer won't work if it + doesn't. It's tested in the continuous integration primers, only + the coverage is not calculated on everything. If lazy clone + breaks for local use we'll probably notice because we'll have a + fatal when launching the primer locally. """ logging.info("Lazy cloning %s", self.url) if not self.clone_directory.exists(): diff --git a/pylint/testutils/pyreverse.py b/pylint/testutils/pyreverse.py index 34d66d2177..7226c196aa 100644 --- a/pylint/testutils/pyreverse.py +++ b/pylint/testutils/pyreverse.py @@ -9,7 +9,8 @@ class PyreverseConfig: # pylint: disable=too-many-instance-attributes, too-many-arguments """Holds the configuration options for Pyreverse. - The default values correspond to the defaults of the options' parser. + The default values correspond to the defaults of the options' + parser. """ def __init__( diff --git a/pylint/typing.py b/pylint/typing.py index 84c49df558..1d0357d0ae 100644 --- a/pylint/typing.py +++ b/pylint/typing.py @@ -44,7 +44,9 @@ class ErrorDescriptionDict(TypedDict): class MessageLocationTuple(NamedTuple): - """Tuple with information about the location of a to-be-displayed message.""" + """Tuple with information about the location of a to-be-displayed + message. 
+ """ abspath: str path: str diff --git a/pylint/utils/__init__.py b/pylint/utils/__init__.py index 53d07825ec..2564ef7552 100644 --- a/pylint/utils/__init__.py +++ b/pylint/utils/__init__.py @@ -40,8 +40,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Some various utilities and helper classes, most of them used in the -main pylint class +"""Some various utilities and helper classes, most of them used in the main +pylint class. """ from pylint.utils.ast_walker import ASTWalker diff --git a/pylint/utils/file_state.py b/pylint/utils/file_state.py index 52761471f8..fda2bd0cbb 100644 --- a/pylint/utils/file_state.py +++ b/pylint/utils/file_state.py @@ -61,8 +61,8 @@ def _collect_block_lines( node: nodes.NodeNG, msg_state: MessageStateDict, ) -> None: - """Recursively walk (depth first) AST to collect block level options line - numbers. + """Recursively walk (depth first) AST to collect block level options + line numbers. """ for child in node.get_children(): self._collect_block_lines(msgs_store, child, msg_state) @@ -134,9 +134,9 @@ def handle_ignored_message( ) -> None: """Report an ignored message. - state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG, - depending on whether the message was disabled locally in the module, or - globally. + state_scope is either MSG_STATE_SCOPE_MODULE or + MSG_STATE_SCOPE_CONFIG, depending on whether the message was + disabled locally in the module, or globally. """ if state_scope == MSG_STATE_SCOPE_MODULE: assert isinstance(line, int) # should always be int inside module scope diff --git a/pylint/utils/linterstats.py b/pylint/utils/linterstats.py index 0cf12ad43b..615eda6366 100644 --- a/pylint/utils/linterstats.py +++ b/pylint/utils/linterstats.py @@ -291,7 +291,9 @@ def increase_single_message_count(self, type_name: str, increase: int) -> None: def increase_single_module_message_count( self, modname: str, type_name: MessageTypesFullName, increase: int ) -> None: - """Increase the message type count of an individual message type of a module.""" + """Increase the message type count of an individual message type of a + module. + """ self.by_module[modname][type_name] += increase def reset_message_count(self) -> None: @@ -305,8 +307,8 @@ def reset_message_count(self) -> None: def merge_stats(stats: List[LinterStats]): - """Used to merge multiple stats objects into a new one when pylint is run in - parallel mode. + """Used to merge multiple stats objects into a new one when pylint is run + in parallel mode. """ merged = LinterStats() for stat in stats: diff --git a/pylint/utils/utils.py b/pylint/utils/utils.py index c3d120458e..4cf5001819 100644 --- a/pylint/utils/utils.py +++ b/pylint/utils/utils.py @@ -99,7 +99,7 @@ def cmp(a, b): def diff_string(old, new): """Given an old and new int value, return a string representing the - difference + difference. """ diff = abs(old - new) diff_str = f"{CMPS[cmp(old, new)]}{diff and f'{diff:.2f}' or ''}" @@ -124,12 +124,16 @@ def get_module_and_frameid(node): def get_rst_title(title, character): - """Permit to get a title formatted as ReStructuredText test (underlined with a chosen character).""" + """Permit to get a title formatted as ReStructuredText test (underlined + with a chosen character). 
+ """ return f"{title}\n{character * len(title)}\n" def get_rst_section(section, options, doc=None): - """Format an option's section using as a ReStructuredText formatted output.""" + """Format an option's section using as a ReStructuredText formatted + output. + """ result = "" if section: result += get_rst_title(section, "'") @@ -168,7 +172,7 @@ def tokenize_module(node: nodes.Module) -> List[tokenize.TokenInfo]: def register_plugins(linter, directory): """Load all module and package in the given directory, looking for a - 'register' function in each one, used to register pylint checkers + 'register' function in each one, used to register pylint checkers. """ imported = {} for filename in os.listdir(directory): @@ -254,12 +258,12 @@ def get_global_option( option: GLOBAL_OPTION_NAMES, default: Optional[T_GlobalOptionReturnTypes] = None, ) -> Optional[T_GlobalOptionReturnTypes]: - """Retrieve an option defined by the given *checker* or - by all known option providers. + """Retrieve an option defined by the given *checker* or by all known option + providers. - It will look in the list of all options providers - until the given *option* will be found. - If the option wasn't found, the *default* value will be returned. + It will look in the list of all options providers until the given + *option* will be found. If the option wasn't found, the *default* + value will be returned. """ # First, try in the given checker's config. # After that, look in the options providers. diff --git a/script/bump_changelog.py b/script/bump_changelog.py index af32b3cdfd..0fb82382fc 100644 --- a/script/bump_changelog.py +++ b/script/bump_changelog.py @@ -1,8 +1,8 @@ # ORIGINAL here: https://github.com/PyCQA/astroid/blob/main/script/bump_changelog.py # DO NOT MODIFY DIRECTLY -"""This script permits to upgrade the changelog in astroid or pylint when releasing a -version. +"""This script permits to upgrade the changelog in astroid or pylint when +releasing a version. """ # pylint: disable=logging-fstring-interpolation import argparse diff --git a/tests/benchmark/test_baseline_benchmarks.py b/tests/benchmark/test_baseline_benchmarks.py index 5617996dfd..c2b3fcc801 100644 --- a/tests/benchmark/test_baseline_benchmarks.py +++ b/tests/benchmark/test_baseline_benchmarks.py @@ -37,10 +37,11 @@ def _empty_filepath(): class SleepingChecker(BaseChecker): - """A checker that sleeps, the wall-clock time should reduce as we add workers. + """A checker that sleeps, the wall-clock time should reduce as we add + workers. - As we apply a roughly constant amount of "work" in this checker any variance is - likely to be caused by the pylint system. + As we apply a roughly constant amount of "work" in this checker any + variance is likely to be caused by the pylint system. """ __implements__ = (pylint.interfaces.IRawChecker,) @@ -58,16 +59,18 @@ class SleepingChecker(BaseChecker): def process_module(self, _node: nodes.Module) -> None: """Sleeps for `sleep_duration` on each call. - This effectively means each file costs ~`sleep_duration`+framework overhead + This effectively means each file costs + ~`sleep_duration`+framework overhead """ time.sleep(self.sleep_duration) class SleepingCheckerLong(BaseChecker): - """A checker that sleeps, the wall-clock time should reduce as we add workers. + """A checker that sleeps, the wall-clock time should reduce as we add + workers. - As we apply a roughly constant amount of "work" in this checker any variance is - likely to be caused by the pylint system. 
+ As we apply a roughly constant amount of "work" in this checker any + variance is likely to be caused by the pylint system. """ __implements__ = (pylint.interfaces.IRawChecker,) @@ -85,13 +88,16 @@ class SleepingCheckerLong(BaseChecker): def process_module(self, _node: nodes.Module) -> None: """Sleeps for `sleep_duration` on each call. - This effectively means each file costs ~`sleep_duration`+framework overhead + This effectively means each file costs + ~`sleep_duration`+framework overhead """ time.sleep(self.sleep_duration) class NoWorkChecker(BaseChecker): - """A checker that sleeps, the wall-clock time should change as we add threads.""" + """A checker that sleeps, the wall-clock time should change as we add + threads. + """ __implements__ = (pylint.interfaces.IRawChecker,) @@ -114,8 +120,8 @@ def process_module(self, _node: nodes.Module) -> None: class TestEstablishBaselineBenchmarks: """Naive benchmarks for the high-level pylint framework. - Because this benchmarks the fundamental and common parts and changes seen here will - impact everything else + Because this benchmarks the fundamental and common parts and changes + seen here will impact everything else """ empty_filepath = _empty_filepath() @@ -180,8 +186,9 @@ def test_baseline_benchmark_check_parallel_j2(self, benchmark): def test_baseline_lots_of_files_j1(self, benchmark): """Establish a baseline with only 'master' checker being run in -j1. - We do not register any checkers except the default 'master', so the cost is just - that of the system with a lot of files registered + We do not register any checkers except the default 'master', so + the cost is just that of the system with a lot of files + registered """ if benchmark.disabled: benchmark(print, "skipping, only benchmark large file counts") @@ -217,10 +224,11 @@ def test_baseline_lots_of_files_j2(self, benchmark): ), f"Expected no errors to be thrown: {pprint.pformat(linter.reporter.messages)}" def test_baseline_lots_of_files_j1_empty_checker(self, benchmark): - """Baselines pylint for a single extra checker being run in -j1, for N-files. + """Baselines pylint for a single extra checker being run in -j1, for + N-files. - We use a checker that does no work, so the cost is just that of the system at - scale + We use a checker that does no work, so the cost is just that of + the system at scale """ if benchmark.disabled: benchmark(print, "skipping, only benchmark large file counts") @@ -239,8 +247,8 @@ def test_baseline_lots_of_files_j1_empty_checker(self, benchmark): def test_baseline_lots_of_files_j2_empty_checker(self, benchmark): """Baselines pylint for a single extra checker being run in -j2, for N-files. 
- We use a checker that does no work, so the cost is just that of the system at - scale, across workers + We use a checker that does no work, so the cost is just that of + the system at scale, across workers """ if benchmark.disabled: benchmark(print, "skipping, only benchmark large file counts") diff --git a/tests/checkers/unittest_format.py b/tests/checkers/unittest_format.py index 6e0025a9f5..48d0c1fca6 100644 --- a/tests/checkers/unittest_format.py +++ b/tests/checkers/unittest_format.py @@ -108,8 +108,8 @@ def testCheckKeywordParensHandlesUnnecessaryParens(self) -> None: self.checker._check_keyword_parentheses(_tokenize_str(code), offset) def testNoSuperfluousParensWalrusOperatorIf(self) -> None: - """Parenthesis change the meaning of assignment in the walrus operator and so - are not always superfluous: + """Parenthesis change the meaning of assignment in the walrus operator + and so are not always superfluous: """ cases = [ ("if (odd := is_odd(i))\n"), @@ -164,7 +164,8 @@ class TestCheckSpace(CheckerTestCase): def test_encoding_token(self) -> None: """Make sure the encoding token doesn't change the checker's behavior. - _tokenize_str doesn't produce an encoding token, but reading a file does + _tokenize_str doesn't produce an encoding token, but reading a + file does """ with self.assertNoMessages(): encoding_token = tokenize.TokenInfo( @@ -177,8 +178,8 @@ def test_encoding_token(self) -> None: def test_disable_global_option_end_of_line() -> None: - """Test for issue with disabling tokenizer messages that extend beyond the scope of - the ast tokens. + """Test for issue with disabling tokenizer messages that extend beyond the + scope of the ast tokens. """ file_ = tempfile.NamedTemporaryFile("w", delete=False) with file_: diff --git a/tests/checkers/unittest_non_ascii_name.py b/tests/checkers/unittest_non_ascii_name.py index 4d69a65194..85e813a43c 100644 --- a/tests/checkers/unittest_non_ascii_name.py +++ b/tests/checkers/unittest_non_ascii_name.py @@ -131,7 +131,9 @@ def test_assignname( code: str, assign_type: str, ): - """Variables defined no matter where, should be checked for non ascii.""" + """Variables defined no matter where, should be checked for non + ascii. + """ assign_node = astroid.extract_node(code) if not isinstance(assign_node, nodes.AssignName): @@ -256,7 +258,9 @@ def test_assignname( ], ) def test_check_import(self, import_statement: str, wrong_name: Optional[str]): - """We expect that for everything that user can change there is a message.""" + """We expect that for everything that user can change there is a + message. + """ node = astroid.extract_node(f"{import_statement} #@") expected_msgs: Iterable[pylint.testutils.MessageTest] = tuple() diff --git a/tests/checkers/unittest_stdlib.py b/tests/checkers/unittest_stdlib.py index cc8991b9df..fadc1666a7 100644 --- a/tests/checkers/unittest_stdlib.py +++ b/tests/checkers/unittest_stdlib.py @@ -43,10 +43,11 @@ class TestStdlibChecker(CheckerTestCase): def test_deprecated_no_qname_on_unexpected_nodes(self) -> None: """Test that we don't crash on nodes which don't have a qname method. - While this test might seem weird since it uses a transform, it's actually - testing a crash that happened in production, but there was no way to retrieve - the code for which this occurred (how an AssignAttr got to be the result of a - function inference beats me...) 
+ While this test might seem weird since it uses a transform, it's + actually testing a crash that happened in production, but there + was no way to retrieve the code for which this occurred (how an + AssignAttr got to be the result of a function inference beats + me...) """ def infer_func( diff --git a/tests/checkers/unittest_typecheck.py b/tests/checkers/unittest_typecheck.py index 4564a461ef..464a33d8b2 100644 --- a/tests/checkers/unittest_typecheck.py +++ b/tests/checkers/unittest_typecheck.py @@ -124,7 +124,9 @@ def __getitem__(self, item): self.decorated_by_unsubscriptable_then_subscriptable_class(decorators) def getitem_on_modules(self) -> None: - """Mainly validate the code won't crash if we're not having a function.""" + """Mainly validate the code won't crash if we're not having a + function. + """ module = astroid.parse( """ import collections diff --git a/tests/checkers/unittest_unicode/__init__.py b/tests/checkers/unittest_unicode/__init__.py index 7a748245af..69982c7292 100644 --- a/tests/checkers/unittest_unicode/__init__.py +++ b/tests/checkers/unittest_unicode/__init__.py @@ -68,9 +68,10 @@ class FakeNode: """Simple Faker representing a Module node. - Astroid crashes in a number of cases if we want to lint unsupported encodings. - So, this is used to test the behaviour of the encoding checker. - This shall ensure that our checks keep working once Python supports UTF16/32. + Astroid crashes in a number of cases if we want to lint unsupported + encodings. So, this is used to test the behaviour of the encoding + checker. This shall ensure that our checks keep working once Python + supports UTF16/32. """ file: Path diff --git a/tests/checkers/unittest_unicode/unittest_bad_chars.py b/tests/checkers/unittest_unicode/unittest_bad_chars.py index 1f1e1e8d96..ab6fbc9f11 100644 --- a/tests/checkers/unittest_unicode/unittest_bad_chars.py +++ b/tests/checkers/unittest_unicode/unittest_bad_chars.py @@ -113,11 +113,12 @@ def test_find_bad_chars( line_ending: str, add_invalid_bytes: bool, ): - """All combinations of bad characters that are accepted by Python at the moment - are tested in all possible combinations of - - line ending - - encoding - - including not encode-able byte (or not) + """All combinations of bad characters that are accepted by Python at + the moment are tested in all possible combinations of. + + - line ending + - encoding + - including not encode-able byte (or not) """ codec, start_msg = codec_and_msg @@ -207,8 +208,8 @@ def test_bad_chars_that_would_currently_crash_python( msg_id: str, codec_and_msg: Tuple[str, Tuple[pylint.testutils.MessageTest]], ): - """Special test for a file containing chars that lead to - Python or Astroid crashes (which causes Pylint to exit early) + """Special test for a file containing chars that lead to Python or + Astroid crashes (which causes Pylint to exit early) """ codec, start_msg = codec_and_msg # Create file that will fail loading in astroid. @@ -253,7 +254,9 @@ def test_bad_chars_that_would_currently_crash_python( ], ) def test___check_invalid_chars(self, char: str, msg: str, codec: str) -> None: - """Check function should deliver correct column no matter which codec we used.""" + """Check function should deliver correct column no matter which codec + we used. 
+ """ with self.assertAddsMessages( pylint.testutils.MessageTest( msg_id=msg, diff --git a/tests/checkers/unittest_unicode/unittest_bidirectional_unicode.py b/tests/checkers/unittest_unicode/unittest_bidirectional_unicode.py index 2416957c92..ed875aef9b 100644 --- a/tests/checkers/unittest_unicode/unittest_bidirectional_unicode.py +++ b/tests/checkers/unittest_unicode/unittest_bidirectional_unicode.py @@ -22,8 +22,9 @@ class TestBidirectionalUnicodeChecker(pylint.testutils.CheckerTestCase): checker: pylint.checkers.unicode.UnicodeChecker def test_finds_bidirectional_unicode_that_currently_not_parsed(self): - """Test an example from https://github.com/nickboucher/trojan-source/tree/main/Python - that is currently not working Python but producing a syntax error + """Test an example from https://github.com/nickboucher/trojan- + source/tree/main/Python that is currently not working Python but + producing a syntax error. So we test this to make sure it stays like this """ diff --git a/tests/checkers/unittest_unicode/unittest_functions.py b/tests/checkers/unittest_unicode/unittest_functions.py index d14d2fdadf..3ab01aa99f 100644 --- a/tests/checkers/unittest_unicode/unittest_functions.py +++ b/tests/checkers/unittest_unicode/unittest_functions.py @@ -102,7 +102,9 @@ def test_map_positions_to_result( expected: Dict[int, pylint.checkers.unicode._BadChar], search_dict, ): - """Test all possible outcomes for map position function in UTF-8 and ASCII.""" + """Test all possible outcomes for map position function in UTF-8 and + ASCII. + """ if isinstance(line, bytes): newline = b"\n" else: diff --git a/tests/checkers/unittest_unicode/unittest_invalid_encoding.py b/tests/checkers/unittest_unicode/unittest_invalid_encoding.py index 5facf9a059..70e22547b7 100644 --- a/tests/checkers/unittest_unicode/unittest_invalid_encoding.py +++ b/tests/checkers/unittest_unicode/unittest_invalid_encoding.py @@ -121,7 +121,9 @@ def test_invalid_unicode_files(self, tmp_path: Path, test_file: str, line_no: in ], ) def test__determine_codec(self, content: bytes, codec: str, line: int): - """The codec determined should be exact no matter what we throw at it.""" + """The codec determined should be exact no matter what we throw at + it. + """ assert self.checker._determine_codec(io.BytesIO(content)) == (codec, line) def test__determine_codec_raises_syntax_error_on_invalid_input(self): diff --git a/tests/checkers/unittest_variables.py b/tests/checkers/unittest_variables.py index 3527034039..6c77c43872 100644 --- a/tests/checkers/unittest_variables.py +++ b/tests/checkers/unittest_variables.py @@ -196,7 +196,9 @@ def test_nested_lambda(self) -> None: @set_config(ignored_argument_names=re.compile("arg")) def test_ignored_argument_names_no_message(self) -> None: - """Make sure is_ignored_argument_names properly ignores function arguments.""" + """Make sure is_ignored_argument_names properly ignores function + arguments. + """ node = astroid.parse( """ def fooby(arg): diff --git a/tests/config/test_functional_config_loading.py b/tests/config/test_functional_config_loading.py index 3f571aa338..e6cc328be3 100644 --- a/tests/config/test_functional_config_loading.py +++ b/tests/config/test_functional_config_loading.py @@ -1,8 +1,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""This launches the configuration functional tests. 
This permits to test configuration -files by providing a file with the appropriate extension in the +"""This launches the configuration functional tests. This permits to test +configuration files by providing a file with the appropriate extension in the ``tests/config/functional`` directory. Let's say you have a regression_list_crash.toml file to test. Then, if there is an error in the conf, diff --git a/tests/config/unittest_config.py b/tests/config/unittest_config.py index 80f895000d..63a09e2882 100644 --- a/tests/config/unittest_config.py +++ b/tests/config/unittest_config.py @@ -76,8 +76,8 @@ def test__regexp_csv_validator_invalid() -> None: class TestPyLinterOptionSetters(CheckerTestCase): - """Class to check the set_config decorator and get_global_option util for options - declared in PyLinter. + """Class to check the set_config decorator and get_global_option util for + options declared in PyLinter. """ class Checker(BaseChecker): diff --git a/tests/extensions/test_check_docs_utils.py b/tests/extensions/test_check_docs_utils.py index 77b7938681..f1a7c098d6 100644 --- a/tests/extensions/test_check_docs_utils.py +++ b/tests/extensions/test_check_docs_utils.py @@ -12,8 +12,8 @@ # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE -"""Unit tests for the pylint checkers in :mod:`pylint.extensions.check_docs`, in -particular the parameter documentation checker `DocstringChecker` +"""Unit tests for the pylint checkers in :mod:`pylint.extensions.check_docs`, +in particular the parameter documentation checker `DocstringChecker` """ import astroid import pytest diff --git a/tests/lint/test_utils.py b/tests/lint/test_utils.py index a1fa536140..2ee85e7792 100644 --- a/tests/lint/test_utils.py +++ b/tests/lint/test_utils.py @@ -38,7 +38,9 @@ def test_get_fatal_error_message() -> None: def test_issue_template_on_fatal_errors(capsys: pytest.CaptureFixture) -> None: - """Test that we also create an issue template if the offending exception isn't from astroid.""" + """Test that we also create an issue template if the offending exception + isn't from astroid. + """ with pytest.raises(SystemExit): with unittest.mock.patch( "astroid.MANAGER.ast_from_file", side_effect=RecursionError() diff --git a/tests/lint/unittest_lint.py b/tests/lint/unittest_lint.py index 9dd6ae4dec..0a6513ecd1 100644 --- a/tests/lint/unittest_lint.py +++ b/tests/lint/unittest_lint.py @@ -814,8 +814,8 @@ def should_analyze_file(modname: str, path: str, is_argument: bool = False) -> b def test_custom_should_analyze_file() -> None: - """Check that we can write custom should_analyze_file that work - even for arguments. + """Check that we can write custom should_analyze_file that work even for + arguments. """ package_dir = os.path.join(REGRTEST_DATA_DIR, "bad_package") wrong_file = os.path.join(package_dir, "wrong.py") diff --git a/tests/primer/test_primer_external.py b/tests/primer/test_primer_external.py index d9d4136ccc..639a5483db 100644 --- a/tests/primer/test_primer_external.py +++ b/tests/primer/test_primer_external.py @@ -68,11 +68,13 @@ def test_primer_external_packages_no_crash_batch_two( @staticmethod def _primer_test(package: PackageToLint, caplog: LogCaptureFixture) -> None: - """Runs pylint over external packages to check for crashes and fatal messages. + """Runs pylint over external packages to check for crashes and fatal + messages. 
 
-        We only check for crashes (bit-encoded exit code 32) and fatal messages (bit-
-        encoded exit code 1). We assume that these external repositories do not have any
-        fatal errors in their code so that any fatal errors are pylint false positives
+        We only check for crashes (bit-encoded exit code 32) and fatal
+        messages (bit-encoded exit code 1). We assume that these external
+        repositories do not have any fatal errors in their code so that
+        any fatal errors are pylint false positives.
         """
         caplog.set_level(logging.INFO)
         package.lazy_clone()
diff --git a/tests/pyreverse/test_diadefs.py b/tests/pyreverse/test_diadefs.py
index d9a3589360..f71fdf2e73 100644
--- a/tests/pyreverse/test_diadefs.py
+++ b/tests/pyreverse/test_diadefs.py
@@ -127,8 +127,8 @@ def test_exctract_relations(
     def test_functional_relation_extraction(
         self, default_config: PyreverseConfig, get_project: Callable
     ) -> None:
-        """Functional test of relations extraction;
-        different classes possibly in different modules
+        """Functional test of relations extraction; different classes possibly
+        in different modules.
         """
         # XXX should be catching pyreverse environment problem but doesn't
         # pyreverse doesn't extract the relations but this test ok
diff --git a/tests/pyreverse/test_utils.py b/tests/pyreverse/test_utils.py
index 60c3a77c18..425eff54bf 100644
--- a/tests/pyreverse/test_utils.py
+++ b/tests/pyreverse/test_utils.py
@@ -84,8 +84,8 @@ class A:
 @patch("pylint.pyreverse.utils.get_annotation")
 @patch("astroid.node_classes.NodeNG.infer", side_effect=astroid.InferenceError)
 def test_infer_node_1(mock_infer: Any, mock_get_annotation: Any) -> None:
-    """Return set() when astroid.InferenceError is raised and an annotation has not been
-    returned.
+    """Return set() when astroid.InferenceError is raised and an annotation has
+    not been returned.
     """
     mock_get_annotation.return_value = None
     node = astroid.extract_node("a: str = 'mystr'")
@@ -97,8 +97,8 @@ def test_infer_node_1(mock_infer: Any, mock_get_annotation: Any) -> None:
 @patch("pylint.pyreverse.utils.get_annotation")
 @patch("astroid.node_classes.NodeNG.infer")
 def test_infer_node_2(mock_infer: Any, mock_get_annotation: Any) -> None:
-    """Return set(node.infer()) when InferenceError is not raised and an annotation has
-    not been returned.
+    """Return set(node.infer()) when InferenceError is not raised and an
+    annotation has not been returned.
     """
     mock_get_annotation.return_value = None
     node = astroid.extract_node("a: str = 'mystr'")
@@ -108,8 +108,8 @@ def test_infer_node_2(mock_infer: Any, mock_get_annotation: Any) -> None:
 
 
 def test_infer_node_3() -> None:
-    """Return a set containing a nodes.ClassDef object when the attribute has a type
-    annotation.
+    """Return a set containing a nodes.ClassDef object when the attribute has a
+    type annotation.
     """
     node = astroid.extract_node(
         """
     class MyClass:
         def __init__(self, component: Component):
@@ -127,7 +127,9 @@ def __init__(self, component: Component):
 
 
 def test_infer_node_4() -> None:
-    """Verify the label for an argument with a typehint of the type nodes.Subscript."""
+    """Verify the label for an argument with a typehint of the type
+    nodes.Subscript.
+    """
     node = astroid.extract_node(
         """
     class MyClass:
diff --git a/tests/test_check_parallel.py b/tests/test_check_parallel.py
index 5bc1a6f279..88b54b8e77 100644
--- a/tests/test_check_parallel.py
+++ b/tests/test_check_parallel.py
@@ -51,7 +51,9 @@ def _gen_file_datas(count: int = 1) -> List[FileItem]:
 
 
 class SequentialTestChecker(BaseChecker):
-    """A checker that does not need to consolidate data across run invocations."""
+    """A checker that does not need to consolidate data across run
+    invocations.
+    """
 
     __implements__ = (pylint.interfaces.IRawChecker,)
 
@@ -139,7 +141,9 @@ def process_module(self, _node: nodes.Module) -> None:
 
 
 class ExtraSequentialTestChecker(SequentialTestChecker):
-    """A checker that does not need to consolidate data across run invocations."""
+    """A checker that does not need to consolidate data across run
+    invocations.
+    """
 
     name = "extra-sequential-checker"
     test_data = "extra-sequential"
@@ -153,7 +157,9 @@ class ExtraParallelTestChecker(ParallelTestChecker):
 
 
 class ThirdSequentialTestChecker(SequentialTestChecker):
-    """A checker that does not need to consolidate data across run invocations."""
+    """A checker that does not need to consolidate data across run
+    invocations.
+    """
 
     name = "third-sequential-checker"
     test_data = "third-sequential"
@@ -181,7 +187,8 @@ def test_worker_initialize(self) -> None:
         assert isinstance(pylint.lint.parallel._worker_linter, type(linter))
 
     def test_worker_initialize_pickling(self) -> None:
-        """Test that we can pickle objects that standard pickling in multiprocessing can't.
+        """Test that we can pickle objects that standard pickling in
+        multiprocessing can't.
 
         See: https://stackoverflow.com/questions/8804830/python-multiprocessing-picklingerror-cant-pickle-type-function
 
@@ -240,7 +247,9 @@ def test_worker_check_single_file_no_checkers(self) -> None:
         assert stats.warning == 0
 
     def test_worker_check_sequential_checker(self) -> None:
-        """Same as test_worker_check_single_file_no_checkers with SequentialTestChecker."""
+        """Same as test_worker_check_single_file_no_checkers with
+        SequentialTestChecker.
+        """
         linter = PyLinter(reporter=Reporter())
         worker_initialize(linter=dill.dumps(linter))
 
@@ -290,8 +299,9 @@ class TestCheckParallel:
     def test_sequential_checkers_work(self) -> None:
         """Tests original basic types of checker works as expected in -jN.
 
-        This means that a sequential checker should return the same data for a given
-        file-stream irrespective of whether it's run in -j1 or -jN
+        This means that a sequential checker should return the same data
+        for a given file-stream irrespective of whether it's run in -j1
+        or -jN
         """
         linter = PyLinter(reporter=Reporter())
 
@@ -361,7 +371,8 @@ def test_sequential_checkers_work(self) -> None:
     def test_invoke_single_job(self) -> None:
         """Tests basic checkers functionality using just a single workderdo.
 
-        This is *not* the same -j1 and does not happen under normal operation
+        This is *not* the same -j1 and does not happen under normal
+        operation
         """
         linter = PyLinter(reporter=Reporter())
 
@@ -419,7 +430,8 @@ def test_invoke_single_job(self) -> None:
         ],
     )
     def test_compare_workers_to_single_proc(self, num_files, num_jobs, num_checkers):
-        """Compares the 3 key parameters for check_parallel() produces the same results.
+        """Compares the 3 key parameters for check_parallel() to ensure it
+        produces the same results.
 
         The intent here is to ensure that the check_parallel() operates on each
         file, without ordering issues, irrespective of the number of workers used and the
@@ -513,7 +525,8 @@ def test_compare_workers_to_single_proc(self, num_files, num_jobs, num_checkers)
         ],
     )
     def test_map_reduce(self, num_files, num_jobs, num_checkers):
-        """Compares the 3 key parameters for check_parallel() produces the same results.
+        """Compares the 3 key parameters for check_parallel() to ensure it
+        produces the same results.
 
         The intent here is to validate the reduce step: no stats should be lost.
 
diff --git a/tests/test_func.py b/tests/test_func.py
index 492797ec21..41ea77a954 100644
--- a/tests/test_func.py
+++ b/tests/test_func.py
@@ -39,7 +39,7 @@ def exception_str(self, ex) -> str:  # pylint: disable=unused-argument
     """Function used to replace default __str__ method of exception instances
 
-    This function is not typed because it is legacy code
+    This function is not typed because it is legacy code.
     """
     return f"in {ex.file}\n:: {', '.join(ex.args)}"
 
 
diff --git a/tests/test_regr.py b/tests/test_regr.py
index 3ece9c9873..f49ba4aac5 100644
--- a/tests/test_regr.py
+++ b/tests/test_regr.py
@@ -18,7 +18,7 @@
 # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE
 
 """Non regression tests for pylint, which requires a too specific configuration
-to be incorporated in the automatic functional test framework
+to be incorporated in the automatic functional test framework.
 """
 
 # pylint: disable=redefined-outer-name
@@ -51,8 +51,8 @@ def disable():
 
 @pytest.fixture
 def finalize_linter(linter: PyLinter) -> Iterator[PyLinter]:
-    """Call reporter.finalize() to clean up pending messages if a test
-    finished badly.
+    """Call reporter.finalize() to clean up pending messages if a test finished
+    badly.
     """
     yield linter
     linter.reporter = cast(  # Due to fixture
diff --git a/tests/test_self.py b/tests/test_self.py
index 61aa35d325..abdd7f5fc8 100644
--- a/tests/test_self.py
+++ b/tests/test_self.py
@@ -201,8 +201,9 @@ def _test_output(self, args: List[str], expected_output: str) -> None:
     def _test_output_file(
         self, args: List[str], filename: LocalPath, expected_output: str
     ) -> None:
-        """Run Pylint with the ``output`` option set (must be included in
-        the ``args`` passed to this method!) and check the file content afterwards.
+        """Run Pylint with the ``output`` option set (must be included in the
+        ``args`` passed to this method!) and check the file content
+        afterwards.
         """
         out = StringIO()
         self._run_pylint(args, out=out)
@@ -1173,7 +1174,9 @@ def test_fail_on_exit_code(self, args, expected):
         self._runtest([path, "--fail-under=-10"] + args, code=expected)
 
     def test_one_module_fatal_error(self):
-        """Fatal errors in one of several modules linted still exits non-zero."""
+        """Fatal errors in one of several modules linted still exits
+        non-zero.
+        """
         valid_path = join(HERE, "conftest.py")
         invalid_path = join(HERE, "garbagePath.py")
         self._runtest([valid_path, invalid_path], code=1)
@@ -1264,7 +1267,9 @@ def test_output_file_specified_in_rcfile(self, tmpdir: LocalPath) -> None:
 
     @staticmethod
     def test_enable_all_extensions() -> None:
-        """Test to see if --enable-all-extensions does indeed load all extensions."""
+        """Test to see if --enable-all-extensions does indeed load all
+        extensions.
+        """
         # Record all extensions
         plugins = []
         for filename in os.listdir(os.path.dirname(extensions.__file__)):
@@ -1280,7 +1285,9 @@ def test_enable_all_extensions() -> None:
 
     @staticmethod
     def test_load_text_repoter_if_not_provided() -> None:
-        """Test if PyLinter.reporter is a TextReporter if no reporter is provided."""
+        """Test if PyLinter.reporter is a TextReporter if no reporter is
+        provided.
+        """
         linter = PyLinter()
         assert isinstance(linter.reporter, TextReporter)
 
@@ -1288,8 +1295,10 @@ def test_load_text_repoter_if_not_provided() -> None:
     @staticmethod
     def test_regex_paths_csv_validator() -> None:
         """Test to see if _regexp_paths_csv_validator works.
-        Previously the validator crashed when encountering already validated values.
-        Reported in https://github.com/PyCQA/pylint/issues/5437
+
+        Previously the validator crashed when encountering already
+        validated values. Reported in
+        https://github.com/PyCQA/pylint/issues/5437
         """
         with pytest.raises(SystemExit) as ex:
             Run(["--ignore-paths", "test", join(HERE, "regrtest_data", "empty.py")])
diff --git a/tests/testutils/data/init_hook.py b/tests/testutils/data/init_hook.py
index f28633ea86..4cacb6b0c9 100644
--- a/tests/testutils/data/init_hook.py
+++ b/tests/testutils/data/init_hook.py
@@ -1,3 +1,3 @@
-"""This file should never be tested as the init-hook in the configuration file prevents
-the test runner from getting here.
+"""This file should never be tested as the init-hook in the configuration file
+prevents the test runner from getting here.
 """
diff --git a/tests/testutils/test_output_line.py b/tests/testutils/test_output_line.py
index 9359fc165a..026b076328 100644
--- a/tests/testutils/test_output_line.py
+++ b/tests/testutils/test_output_line.py
@@ -53,7 +53,9 @@ def test_output_line() -> None:
 
 
 def test_output_line_from_message(message: Callable) -> None:
-    """Test that the OutputLine NamedTuple is instantiated correctly with from_msg."""
+    """Test that the OutputLine NamedTuple is instantiated correctly with
+    from_msg.
+    """
     expected_column = 2 if PY38_PLUS else 0
     output_line = OutputLine.from_msg(message())
 
@@ -89,8 +91,8 @@ def test_output_line_from_message(message: Callable) -> None:
 
 @pytest.mark.parametrize("confidence", [HIGH, INFERENCE])
 def test_output_line_to_csv(confidence: Confidence, message: Callable) -> None:
-    """Test that the OutputLine NamedTuple is instantiated correctly with from_msg and
-    then converted to csv.
+    """Test that the OutputLine NamedTuple is instantiated correctly with
+    from_msg and then converted to csv.
     """
     output_line = OutputLine.from_msg(message(confidence), True)
     csv = output_line.to_csv()
@@ -152,7 +154,8 @@ def test_output_line_from_csv_error() -> None:
 def test_output_line_from_csv_deprecated(
     confidence: Optional[str], expected_confidence: str
 ) -> None:
-    """Test that the OutputLine NamedTuple is instantiated correctly with from_csv.
+    """Test that the OutputLine NamedTuple is instantiated correctly with
+    from_csv.
 
     Test OutputLine's of length 5 or 6.
     """
     expected_column = 2 if PY38_PLUS else 0
@@ -185,7 +188,8 @@ def test_output_line_from_csv_deprecated(
 
 
 def test_output_line_from_csv() -> None:
-    """Test that the OutputLine NamedTuple is instantiated correctly with from_csv.
+    """Test that the OutputLine NamedTuple is instantiated correctly with
+    from_csv.
 
     Test OutputLine of length 8.
""" diff --git a/tests/testutils/test_package_to_lint.py b/tests/testutils/test_package_to_lint.py index bf90db3e74..5d635afd06 100644 --- a/tests/testutils/test_package_to_lint.py +++ b/tests/testutils/test_package_to_lint.py @@ -35,7 +35,9 @@ def test_package_to_lint() -> None: def test_package_to_lint_default_value() -> None: - """Test that the PackageToLint is instantiated correctly with default value.""" + """Test that the PackageToLint is instantiated correctly with default + value. + """ package_to_lint = PackageToLint( url="https://github.com/pallets/flask.git", branch="main", diff --git a/tests/unittest_reporting.py b/tests/unittest_reporting.py index b414c1c26f..f072df86d2 100644 --- a/tests/unittest_reporting.py +++ b/tests/unittest_reporting.py @@ -94,8 +94,8 @@ def test_template_option_end_line(linter) -> None: def test_template_option_non_existing(linter) -> None: """Test the msg-template option with non-existent options. - This makes sure that this option remains backwards compatible as new parameters do - not break on previous versions + This makes sure that this option remains backwards compatible as new + parameters do not break on previous versions """ output = StringIO() linter.reporter.out = output